From dc247e729e6ba4b123de6b5fd4b491d3bd40de29 Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Thu, 24 Oct 2024 10:44:04 +0100 Subject: [PATCH 01/74] first draft --- lib/iris/fileformats/cf.py | 65 ++++++++++++++++++++++++++++++-------- 1 file changed, 52 insertions(+), 13 deletions(-) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 024bcb6f1d..b73664fff5 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1425,7 +1425,41 @@ def _translate(self): self.cf_group[cf_name] = CFAuxiliaryCoordinateVariable( cf_name, cf_var.cf_data ) - self.cf_group[cf_name].add_formula_term(cf_root, cf_term) + self.cf_group[cf_name].add_formula_term(cf_root, cf_term) + + if cf_root not in self.cf_group.bounds: + # Check if cf_root has a bounds attribute. + if cf_root in self.cf_group.coordinates: + # Need to generalise this for if it's a dim or aux coord. + bounds_name = getattr( + self.cf_group.coordinates[cf_root], "bounds", None + ) + if bounds_name is not None: + form_terms = getattr( + self.cf_group.coordinates[cf_root], "formula_terms" + ) + form_terms = form_terms.replace(":", "") + form_terms = form_terms.split(" ") + example_dict = {"a": "A", "b": "B", "ps": "PS", "p0": "P0"} + for cf_vari in formula_terms.values(): + for ( + cf_roots, + cf_terms, + ) in cf_vari.cf_terms_by_root.items(): + if cf_terms == cf_term: + if ( + cf_roots in self.cf_group.bounds + and cf_roots == bounds_name + ): + if cf_terms in form_terms: + to_attach_to = example_dict[cf_terms] + attach_from = cf_vari.cf_name + if ( + to_attach_to.lower() + != attach_from.lower() + ): + cf_var.bounds = cf_vari + print(cf_vari.bounds) # Determine the CF data variables. data_variable_names = ( @@ -1553,18 +1587,23 @@ def _build(cf_variable): # may be promoted to a CFDataVariable and restrict promotion to only # those formula terms that are reference surface/phenomenon. for cf_var in self.cf_group.formula_terms.values(): - for cf_root, cf_term in cf_var.cf_terms_by_root.items(): - cf_root_var = self.cf_group[cf_root] - name = cf_root_var.standard_name or cf_root_var.long_name - terms = reference_terms.get(name, []) - if isinstance(terms, str) or not isinstance(terms, Iterable): - terms = [terms] - cf_var_name = cf_var.cf_name - if cf_term in terms and cf_var_name not in self.cf_group.promoted: - data_var = CFDataVariable(cf_var_name, cf_var.cf_data) - self.cf_group.promoted[cf_var_name] = data_var - _build(data_var) - break + if self.cf_group[cf_var.cf_name] is CFBoundaryVariable: + continue + else: + for cf_root, cf_term in cf_var.cf_terms_by_root.items(): + cf_root_var = self.cf_group[cf_root] + if not hasattr(cf_root_var, "standard_name"): + continue + name = cf_root_var.standard_name or cf_root_var.long_name + terms = reference_terms.get(name, []) + if isinstance(terms, str) or not isinstance(terms, Iterable): + terms = [terms] + cf_var_name = cf_var.cf_name + if cf_term in terms and cf_var_name not in self.cf_group.promoted: + data_var = CFDataVariable(cf_var_name, cf_var.cf_data) + self.cf_group.promoted[cf_var_name] = data_var + _build(data_var) + break # Promote any ignored variables. 
promoted = set() not_promoted = ignored.difference(promoted) From 989c9ec84dc87ead6d9e8d26e0c9cfa7fe5452d2 Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Fri, 25 Oct 2024 15:44:30 +0100 Subject: [PATCH 02/74] WIP --- lib/iris/fileformats/cf.py | 43 ++++++++++++++----- .../unit/fileformats/cf/test_CFReader.py | 10 +++-- 2 files changed, 39 insertions(+), 14 deletions(-) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index b73664fff5..0b67a58c2e 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1418,16 +1418,19 @@ def _translate(self): for cf_var in formula_terms.values(): for cf_root, cf_term in cf_var.cf_terms_by_root.items(): + + # Ignore formula terms owned by a bounds variable. - if cf_root not in self.cf_group.bounds: - cf_name = cf_var.cf_name - if cf_var.cf_name not in self.cf_group: - self.cf_group[cf_name] = CFAuxiliaryCoordinateVariable( - cf_name, cf_var.cf_data - ) - self.cf_group[cf_name].add_formula_term(cf_root, cf_term) + # if cf_root not in self.cf_group.bounds: + # cf_name = cf_var.cf_name + # if cf_var.cf_name not in self.cf_group: + # self.cf_group[cf_name] = CFAuxiliaryCoordinateVariable( + # cf_name, cf_var.cf_data + # ) + # self.cf_group[cf_name].add_formula_term(cf_root, cf_term) if cf_root not in self.cf_group.bounds: + # Check if cf_root has a bounds attribute. if cf_root in self.cf_group.coordinates: # Need to generalise this for if it's a dim or aux coord. @@ -1440,7 +1443,7 @@ def _translate(self): ) form_terms = form_terms.replace(":", "") form_terms = form_terms.split(" ") - example_dict = {"a": "A", "b": "B", "ps": "PS", "p0": "P0"} + example_dict = {"a": "A", "b": "B", "ps": "PS", "p0": "P0", "orog": "orography"} for cf_vari in formula_terms.values(): for ( cf_roots, @@ -1458,16 +1461,29 @@ def _translate(self): to_attach_to.lower() != attach_from.lower() ): - cf_var.bounds = cf_vari - print(cf_vari.bounds) + self.cf_group[cf_vari.cf_name] = CFBoundaryVariable(cf_vari.cf_name, cf_vari.cf_data) + cf_var.bounds = cf_vari.cf_name + + if cf_root not in self.cf_group.bounds: + cf_name = cf_var.cf_name + if cf_var.cf_name not in self.cf_group: + new_var = CFAuxiliaryCoordinateVariable( + cf_name, cf_var.cf_data + ) + if hasattr(cf_var, "bounds"): + new_var.bounds = cf_var.bounds + self.cf_group[cf_name] = new_var + self.cf_group[cf_name].add_formula_term(cf_root, cf_term) # Determine the CF data variables. data_variable_names = ( set(netcdf_variable_names) - self.cf_group.non_data_variable_names ) + print("name") for name in data_variable_names: self.cf_group[name] = CFDataVariable(name, self._dataset.variables[name]) + print("name") def _build_cf_groups(self): """Build the first order relationships between CF-netCDF variables.""" @@ -1523,6 +1539,13 @@ def _build(cf_variable): category=iris.warnings.IrisCfNonSpanningVarWarning, ) + if hasattr(cf_variable, "bounds"): + if cf_variable.bounds not in cf_group: + bounds_var = self.cf_group[cf_variable.bounds] + # TODO: warning if fails spans + if bounds_var.spans(cf_variable): + cf_group[bounds_var.cf_name] = bounds_var + # Build CF data variable relationships. if isinstance(cf_variable, CFDataVariable): # Add global netCDF attributes. 
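The matching idea in the hunk above is easier to follow stripped of the CF machinery. The sketch below is a minimal, runnable model using plain dicts in place of the CF variable classes; the names (delta, sigma, lev, and their _bnds counterparts) are illustrative only, loosely mirroring the test fixtures later in this series, and the dict layout is an assumption made for the sketch rather than the real cf_terms_by_root structure.

    # Each formula-terms variable maps root variable name -> term label.
    cf_terms_by_root = {
        "delta": {"lev": "a"},
        "sigma": {"lev": "b"},
        "delta_bnds": {"lev_bnds": "a"},
        "sigma_bnds": {"lev_bnds": "b"},
    }
    # Coordinate name -> name of its bounds variable (its "bounds" attribute).
    coord_bounds = {"lev": "lev_bnds"}

    term_bounds = {}
    for name, roots in cf_terms_by_root.items():
        for root, term in roots.items():
            bounds_name = coord_bounds.get(root)
            if bounds_name is None:
                continue
            # The bounds counterpart of this term variable is whichever other
            # variable supplies the same term label for the root's bounds variable.
            matches = [
                other
                for other, other_roots in cf_terms_by_root.items()
                if other != name and other_roots.get(bounds_name) == term
            ]
            if len(matches) == 1:
                term_bounds[name] = matches[0]

    print(term_bounds)  # {'delta': 'delta_bnds', 'sigma': 'sigma_bnds'}

Patch 03 below reworks the production code into essentially this shape, with the hard-coded example_dict removed and the single-match requirement enforced by tuple unpacking.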
diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 12c1510413..01e6f062b4 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -65,6 +65,8 @@ def netcdf_variable( standard_name=standard_name, **{name: None for name in ugrid_identities}, ) + if bounds is None: + del ncvar.bounds return ncvar @@ -91,9 +93,9 @@ def test_create_global_attributes(self): class Test_translate__formula_terms(tests.IrisTest): def setUp(self): - self.delta = netcdf_variable("delta", "height", np.float64, bounds="delta_bnds") + self.delta = netcdf_variable("delta", "height", np.float64) self.delta_bnds = netcdf_variable("delta_bnds", "height bnds", np.float64) - self.sigma = netcdf_variable("sigma", "height", np.float64, bounds="sigma_bnds") + self.sigma = netcdf_variable("sigma", "height", np.float64) self.sigma_bnds = netcdf_variable("sigma_bnds", "height bnds", np.float64) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" @@ -186,9 +188,9 @@ def test_create_formula_terms(self): class Test_build_cf_groups__formula_terms(tests.IrisTest): def setUp(self): - self.delta = netcdf_variable("delta", "height", np.float64, bounds="delta_bnds") + self.delta = netcdf_variable("delta", "height", np.float64) self.delta_bnds = netcdf_variable("delta_bnds", "height bnds", np.float64) - self.sigma = netcdf_variable("sigma", "height", np.float64, bounds="sigma_bnds") + self.sigma = netcdf_variable("sigma", "height", np.float64) self.sigma_bnds = netcdf_variable("sigma_bnds", "height bnds", np.float64) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" From b756210907adddcad9b3621690586cc4f6084b1a Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Fri, 25 Oct 2024 17:21:02 +0100 Subject: [PATCH 03/74] fix tests and example --- lib/iris/fileformats/cf.py | 74 ++++++------------- .../unit/fileformats/cf/test_CFReader.py | 28 +++++-- 2 files changed, 45 insertions(+), 57 deletions(-) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 0b67a58c2e..35bb64587d 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -16,6 +16,7 @@ from abc import ABCMeta, abstractmethod from collections.abc import Iterable, MutableMapping +import contextlib import os import re from typing import ClassVar @@ -1418,72 +1419,43 @@ def _translate(self): for cf_var in formula_terms.values(): for cf_root, cf_term in cf_var.cf_terms_by_root.items(): - - - # Ignore formula terms owned by a bounds variable. - # if cf_root not in self.cf_group.bounds: - # cf_name = cf_var.cf_name - # if cf_var.cf_name not in self.cf_group: - # self.cf_group[cf_name] = CFAuxiliaryCoordinateVariable( - # cf_name, cf_var.cf_data - # ) - # self.cf_group[cf_name].add_formula_term(cf_root, cf_term) - - if cf_root not in self.cf_group.bounds: - - # Check if cf_root has a bounds attribute. - if cf_root in self.cf_group.coordinates: - # Need to generalise this for if it's a dim or aux coord. - bounds_name = getattr( - self.cf_group.coordinates[cf_root], "bounds", None + bounds_name = None + cf_root_coord = self.cf_group.coordinates.get(cf_root) + with contextlib.suppress(AttributeError): + # Copes with cf_root_coord not existing, OR not having + # `bounds` attribute. 
+ bounds_name = cf_root_coord.bounds + if bounds_name is not None: + # This will error if more or less than 1 variable is found. + (bounds_var,) = [ + f + for f in formula_terms.values() + if f.cf_terms_by_root.get(bounds_name) == cf_term + ] + if bounds_var != cf_var: + cf_var.bounds = bounds_var.cf_name + new_var = CFBoundaryVariable( + bounds_var.cf_name, bounds_var.cf_data ) - if bounds_name is not None: - form_terms = getattr( - self.cf_group.coordinates[cf_root], "formula_terms" - ) - form_terms = form_terms.replace(":", "") - form_terms = form_terms.split(" ") - example_dict = {"a": "A", "b": "B", "ps": "PS", "p0": "P0", "orog": "orography"} - for cf_vari in formula_terms.values(): - for ( - cf_roots, - cf_terms, - ) in cf_vari.cf_terms_by_root.items(): - if cf_terms == cf_term: - if ( - cf_roots in self.cf_group.bounds - and cf_roots == bounds_name - ): - if cf_terms in form_terms: - to_attach_to = example_dict[cf_terms] - attach_from = cf_vari.cf_name - if ( - to_attach_to.lower() - != attach_from.lower() - ): - self.cf_group[cf_vari.cf_name] = CFBoundaryVariable(cf_vari.cf_name, cf_vari.cf_data) - cf_var.bounds = cf_vari.cf_name - + new_var.add_formula_term(bounds_name, cf_term) + self.cf_group[bounds_var.cf_name] = new_var + if cf_root not in self.cf_group.bounds: cf_name = cf_var.cf_name if cf_var.cf_name not in self.cf_group: - new_var = CFAuxiliaryCoordinateVariable( - cf_name, cf_var.cf_data - ) + new_var = CFAuxiliaryCoordinateVariable(cf_name, cf_var.cf_data) if hasattr(cf_var, "bounds"): new_var.bounds = cf_var.bounds + new_var.add_formula_term(cf_root, cf_term) self.cf_group[cf_name] = new_var - self.cf_group[cf_name].add_formula_term(cf_root, cf_term) # Determine the CF data variables. data_variable_names = ( set(netcdf_variable_names) - self.cf_group.non_data_variable_names ) - print("name") for name in data_variable_names: self.cf_group[name] = CFDataVariable(name, self._dataset.variables[name]) - print("name") def _build_cf_groups(self): """Build the first order relationships between CF-netCDF variables.""" diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 01e6f062b4..f84ed5766b 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -174,9 +174,6 @@ def test_create_formula_terms(self): self.assertEqual(set(group.keys()), set(aux_coordinates)) for name in aux_coordinates: self.assertIs(group[name].cf_data, getattr(self, name)) - # Check all the auxiliary coordinates are formula terms. - formula_terms = cf_group.formula_terms - self.assertEqual(set(group.items()), set(formula_terms.items())) # Check there are three bounds. 
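As an aside on the refactor above: contextlib.suppress collapses the two failure modes (the root is not a coordinate at all, or it is a coordinate without a bounds attribute) into a single guarded attribute access, because dict.get returns None for a missing key and None has no .bounds. A tiny standalone illustration with toy names, not the Iris classes:

    import contextlib

    coordinates = {}  # toy stand-in for self.cf_group.coordinates

    bounds_name = None
    with contextlib.suppress(AttributeError):
        # Missing key -> None -> AttributeError on .bounds; a coordinate
        # object lacking a bounds attribute fails in exactly the same way.
        bounds_name = coordinates.get("lev").bounds
    print(bounds_name)  # None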
group = cf_group.bounds self.assertEqual(len(group), 3) @@ -184,6 +181,17 @@ def test_create_formula_terms(self): self.assertEqual(set(group.keys()), set(bounds)) for name in bounds: self.assertEqual(group[name].cf_data, getattr(self, name)) + # Check the formula terms contains all expected terms + formula_terms = cf_group.formula_terms + expected_keys = ["delta", "sigma", "orography", "delta_bnds", "sigma_bnds"] + expected_group = { + k: v + for k, v in dict( + **cf_group.auxiliary_coordinates, **cf_group.bounds + ).items() + if k in expected_keys + } + self.assertEqual(set(expected_group.items()), set(formula_terms.items())) class Test_build_cf_groups__formula_terms(tests.IrisTest): @@ -273,11 +281,9 @@ def test_associate_formula_terms_with_data_variable(self): self.assertEqual(len(group), 5) aux_coordinates = ["delta", "sigma", "orography", "x", "y"] self.assertEqual(set(group.keys()), set(aux_coordinates)) + formula_terms = cf_group.formula_terms for name in aux_coordinates: self.assertIs(group[name].cf_data, getattr(self, name)) - # Check all the auxiliary coordinates are formula terms. - formula_terms = cf_group.formula_terms - self.assertTrue(set(formula_terms.items()).issubset(list(group.items()))) # Check the terms by root. for name, term in zip(aux_coordinates, ["a", "b", "orog"]): self.assertEqual( @@ -294,6 +300,16 @@ def test_associate_formula_terms_with_data_variable(self): aux_coord_group[name_bnds].cf_data, getattr(self, name_bnds), ) + # Check the formula terms contains all expected terms + expected_keys = ["delta", "sigma", "orography", "delta_bnds", "sigma_bnds"] + expected_group = { + k: v + for k, v in dict( + **cf_group.auxiliary_coordinates, **cf_group.bounds + ).items() + if k in expected_keys + } + self.assertEqual(set(expected_group.items()), set(formula_terms.items())) def test_promote_reference(self): with mock.patch( From 22783f39ec9860a4235a8aa8dd0269a10b7f1e1f Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:11:13 +0000 Subject: [PATCH 04/74] Updated environment lockfiles (#6197) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 13 ++++++------- requirements/locks/py311-linux-64.lock | 13 ++++++------- requirements/locks/py312-linux-64.lock | 13 ++++++------- 3 files changed, 18 insertions(+), 21 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index b4f151992d..a67480e8a2 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -65,7 +65,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda#e97f73d51b5acdf1340a15b195738f16 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_2.conda#85c0dc0bcd110c998b01856975486ee7 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hd590300_1.conda#c66f837ac65e4d1cdeb80e2a1d5fcc3d 
https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -125,7 +125,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda#274f367df5d56f152a49ed3203c3b1c1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.15-h4a871b0_2_cpython.conda#98059097f62e97be9aed7ec904055825 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_0.conda#c4cb444844615e1cd4c9d989f770bcc5 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c @@ -158,7 +158,7 @@ https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.1-py310ha75aee5_1.conda#62e8958a19b0417b0b015840d54d6f45 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py310ha75aee5_0.conda#8aac4068f272b6bdeb0aa0f29d8e516f https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe @@ -189,7 +189,7 @@ https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda# https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py310ha75aee5_2.conda#d38aa9579b7210c646e6faef1aed5bbb -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py310ha75aee5_2.conda#6221fa8287780a9bf42aa88719933dbe +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py310ha75aee5_0.conda#a42a2ed94df11c5cfa5348a317e1f197 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 @@ -212,7 +212,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e97 https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py310ha75aee5_1.conda#260c9ae4b2d9af7d5cce7b721cba6132 
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310h2372a71_0.conda#72637c58d36d9475fda24700c9796f19 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310ha75aee5_1.conda#ee18e67b0bd283f6a75369936451d6ac https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 @@ -304,7 +304,7 @@ https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py310hfcf56fc_1.con https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py310had3dfd6_2.conda#a4166b41e54d22e794859641b7cae2d0 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.15.5-py310ha75aee5_0.conda#997218ef5f9619062042b2ac944e222a +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py310ha75aee5_0.conda#f0734f65184577c08c9f1ba92cd9f57f https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py310h89163eb_0.conda#cdc075f4328556adf4dde97b4f4a0532 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310hf462985_6.conda#b8ad2d561f4e0db4f09d06cc0e73e0b0 https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b @@ -338,4 +338,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e - diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index d200ab99f7..60a47736e4 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -65,7 +65,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda#e97f73d51b5acdf1340a15b195738f16 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_2.conda#85c0dc0bcd110c998b01856975486ee7 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hd590300_1.conda#c66f837ac65e4d1cdeb80e2a1d5fcc3d https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -125,7 +125,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda#274f367df5d56f152a49ed3203c3b1c1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.10-hc5c86c4_3_cpython.conda#9e1ad55c87368e662177661a998feed5 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_0.conda#c4cb444844615e1cd4c9d989f770bcc5 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c @@ -158,7 +158,7 @@ https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.1-py311h9ecbd09_1.conda#4605a44155b0c25da37e8f40318c78a4 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py311h9ecbd09_0.conda#75424a18fb275a18b288c099b869c3bc https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe @@ -190,7 +190,7 @@ https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda# https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py311h9ecbd09_2.conda#85a56dd3b692fb5435de1e901354b5b8 -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py311h9ecbd09_2.conda#8b746f1e8fc1cd8f7ce67ad694d7530b +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py311h9ecbd09_0.conda#0ffc1f53106a38f059b151c465891ed3 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 @@ -213,7 +213,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e97 https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py311h9ecbd09_1.conda#616fed0b6f5c925250be779b05d1d7f7 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py311h459d7ec_0.conda#65948e1e5d2ebcc3b8ed9954fd5e91b4 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py311h9ecbd09_1.conda#00895577e2b4c24dca76675ab1862551 https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 @@ -256,7 +256,7 @@ https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#e https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.15.5-py311h9ecbd09_0.conda#dacbddd0f055b477adbbf8d6d52ff74f +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py311h9ecbd09_0.conda#d9c23163e7ac5f8926372c7d792a996f https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py311h2dc5d0c_0.conda#4f0fa0019a6e7be77db3609a707a4581 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda#2a92e152208121afadf85a5e1f3a5f4d @@ -337,4 +337,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e - diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 850c2b5400..99dc274e80 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -65,7 +65,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_1.conda#e97f73d51b5acdf1340a15b195738f16 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_2.conda#85c0dc0bcd110c998b01856975486ee7 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hd590300_1.conda#c66f837ac65e4d1cdeb80e2a1d5fcc3d https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -125,7 +125,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d 
https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_1.conda#274f367df5d56f152a49ed3203c3b1c1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_0.conda#c4cb444844615e1cd4c9d989f770bcc5 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c @@ -158,7 +158,7 @@ https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.1-py312h66e93f0_1.conda#0ad3232829b9509599d8f981c12c9d05 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe @@ -190,7 +190,7 @@ https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda# https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py312h66e93f0_2.conda#2c6c0c68f310bc33972e7c83264d7786 -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_2.conda#e6d115113d912f9c2cc8cddddac20d61 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py312h66e93f0_0.conda#0524eb91d3d78d76d671c6e3cd7cee82 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 @@ -213,7 +213,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e97 https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda#af648b62462794649066366af4ecd5b0 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h98912ed_0.conda#b083847f580eab40a2d58bddf3182b41 
+https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 @@ -256,7 +256,7 @@ https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#e https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.15.5-py312h66e93f0_0.conda#a17fd28f7b4b77527218535fddb8acf5 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda#2a92e152208121afadf85a5e1f3a5f4d @@ -337,4 +337,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e - From 23c74fe3cf68d6a12fdc4c61ee8a5d97bfb6c9f4 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:39:35 +0000 Subject: [PATCH 05/74] added in a vertical rule for surface fields (#5734) * added in a vertical rule for surface fields * added in PR # to whatsnew * added flags * tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fixed whatsnew * improved paramaterisation * added doc improvements * fixed underscore * doc changes * Fixed spacing error --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/src/further_topics/um_files_loading.rst | 6 +++++ docs/src/whatsnew/latest.rst | 9 +++++++ lib/iris/fileformats/pp.py | 27 +++++++++++++++----- lib/iris/fileformats/pp_save_rules.py | 22 +++++++++++++--- lib/iris/tests/test_cube_to_pp.py | 25 ++++++++++++++++++ 5 files changed, 80 insertions(+), 9 deletions(-) diff --git a/docs/src/further_topics/um_files_loading.rst b/docs/src/further_topics/um_files_loading.rst index c5238e6b70..2d2eb973e4 100644 --- a/docs/src/further_topics/um_files_loading.rst +++ b/docs/src/further_topics/um_files_loading.rst @@ -315,6 +315,12 @@ the derived ``altitude``. it produces basic coordinates 'model_level_number', 'sigma' and 'level_pressure', and a manufactured 3D 'air_pressure' coordinate. 
+**Surface Fields** + +In order for surface fields to be recognised when saving, you must include +`label_surface_fields=True` to :func:`iris.fileformats.pp.save` or +:func:`iris.save`. When surface fields are encountered with this flag set to True, +LBLEV will be set to 9999 and LBVC to 129. .. _um_time_metadata: diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index edc6ebfe2d..c862ef192e 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -50,6 +50,11 @@ This document explains the changes made to Iris for this release references are split across multiple input fields, and :meth:`~iris.LOAD_POLICY` to control it, as requested in :issue:`5369`, actioned in :pull:`6168`. +#. `@ESadek-MO`_ has updated :mod:`iris.fileformats.pp_save_rules` and + :mod:`iris.fileformats.pp` to include the `label_surface_fields` flag across + relevant functions, most notably :func:`iris.fileformats.pp.save`. + This allows the user to choose whether or not surface fields are recognised + and handled appropriately. (:issue:`3280`, :pull:`5734`) 🐛 Bugs Fixed ============= @@ -116,6 +121,10 @@ This document explains the changes made to Iris for this release #. `@bouweandela`_ added type hints for :class:`~iris.cube.Cube`. (:pull:`6037`) +#. `@ESadek-MO`_ has updated :ref:`um_files_loading` to include a short description + of the new `label_surface_fields` functionality. (:pull:`5734`) + + 💼 Internal =========== diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index ce92d4456e..3b38304f00 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -2171,7 +2171,7 @@ def _load_cubes_variable_loader( return result -def save(cube, target, append=False, field_coords=None): +def save(cube, target, append=False, field_coords=None, label_surface_fields=False): """Use the PP saving rules (and any user rules) to save a cube to a PP file. Parameters @@ -2192,6 +2192,11 @@ def save(cube, target, append=False, field_coords=None): coordinates of the resulting fields. If None, the final two dimensions are chosen for slicing. + label_surface_fields : bool, default=False + Whether you wish pp_save_rules to recognise surface fields or not. + When true, if surface fields are encountered, LBLEV will be set to 9999 + and LBVC to 129. + Default is False. Notes ----- @@ -2200,11 +2205,11 @@ def save(cube, target, append=False, field_coords=None): of cubes to be saved to a PP file. """ - fields = as_fields(cube, field_coords) + fields = as_fields(cube, field_coords, label_surface_fields=label_surface_fields) save_fields(fields, target, append=append) -def save_pairs_from_cube(cube, field_coords=None): +def save_pairs_from_cube(cube, field_coords=None, label_surface_fields=False): """Use the PP saving rules to generate (2D cube, PP field) pairs from a cube. Parameters @@ -2316,12 +2321,12 @@ def save_pairs_from_cube(cube, field_coords=None): # Run the PP save rules on the slice2D, to fill the PPField, # recording the rules that were used - pp_field = verify(slice2D, pp_field) + pp_field = verify(slice2D, pp_field, label_surface_fields=label_surface_fields) yield (slice2D, pp_field) -def as_fields(cube, field_coords=None): +def as_fields(cube, field_coords=None, label_surface_fields=False): """Use the PP saving rules to convert a cube to an iterable of PP fields. 
Use the PP saving rules (and any user rules) to convert a cube to @@ -2335,9 +2340,19 @@ def as_fields(cube, field_coords=None): reducing the given cube into 2d slices, which will ultimately determine the x and y coordinates of the resulting fields. If None, the final two dimensions are chosen for slicing. + label_surface_fields : bool, default=False + Whether you wish pp_save_rules to recognise surface fields or not. + When true, if surface fields are encountered, LBLEV will be set to 9999 + and LBVC to 129. + Default is False. """ - return (field for _, field in save_pairs_from_cube(cube, field_coords=field_coords)) + return ( + field + for _, field in save_pairs_from_cube( + cube, field_coords=field_coords, label_surface_fields=label_surface_fields + ) + ) def save_fields(fields, target, append: bool = False): diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index b8e95d2160..b156260f72 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -663,7 +663,7 @@ def _lbproc_rules(cube, pp): return pp -def _vertical_rules(cube, pp): +def _vertical_rules(cube, pp, label_surface_fields=False): """Rule for setting vertical levels for the PP field. Parameters @@ -773,6 +773,22 @@ def _vertical_rules(cube, pp): pp.brsvd[0] = depth_coord.bounds[0, 0] pp.brlev = depth_coord.bounds[0, 1] + # Surface field. + if ( + height_coord is None + and depth_coord is None + and pressure_coord is None + and soil_mln_coord is None + and apt_coord is None + and air_pres_coord is None + and level_height_coord is None + and mln_coord is None + and sigma_coord is None + and label_surface_fields + ): + pp.lbvc = 129 + pp.lblev = 9999 + # Single potential-temperature level. if ( apt_coord is not None @@ -883,7 +899,7 @@ def _all_other_rules(cube, pp): return pp -def verify(cube, field): +def verify(cube, field, label_surface_fields=False): # Rules functions. 
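Usage of the new flag is opt-in at the save call. A minimal sketch, mirroring the new test_surface_field test added later in this patch (the output filename is a placeholder):

    import iris.fileformats.pp
    from iris.tests import stock

    cube = stock.lat_lon_cube()  # 2D cube with no vertical coordinate
    # With label_surface_fields=True, fields that carry no recognised
    # vertical coordinate are written with LBVC=129 and LBLEV=9999.
    iris.fileformats.pp.save(
        cube, target="surface_example.pp", label_surface_fields=True
    )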
field = _basic_coord_system_rules(cube, field) field = _um_version_rules(cube, field) @@ -893,7 +909,7 @@ def verify(cube, field): field = _grid_and_pole_rules(cube, field) field = _non_std_cross_section_rules(cube, field) field = _lbproc_rules(cube, field) - field = _vertical_rules(cube, field) + field = _vertical_rules(cube, field, label_surface_fields=label_surface_fields) field = _all_other_rules(cube, field) return field diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py index 6ae4567f49..fa06ba553f 100644 --- a/lib/iris/tests/test_cube_to_pp.py +++ b/lib/iris/tests/test_cube_to_pp.py @@ -370,6 +370,31 @@ def test_lbvc(self): self.assertEqual(field.lblev, lblev) self.assertEqual(field.blev, blev) + def test_surface_field(self): + def setup_cube(coord=None): + cube = stock.lat_lon_cube() + if coord: + cube.add_aux_coord(coord) + temp_pp_path = iris.util.create_temp_filename(".pp") + iris.fileformats.pp.save( + cube, target=temp_pp_path, label_surface_fields=True + ) + cube = iris.fileformats.pp.load(temp_pp_path) + return cube + + # check surface fields are correctly applied + cube = setup_cube() + for field in cube: + self.assertEqual(field.lbvc, 129) + self.assertEqual(field.lblev, 9999) + + # check surface fields aren't incorrectly applied + v_coord = iris.coords.DimCoord(standard_name="depth", units="m", points=[-5]) + cube = setup_cube(v_coord) + for field in cube: + self.assertNotEqual(field.lbvc, 129) + self.assertNotEqual(field.lblev, 9999) + def fields_from_cube(cubes): """Return an iterator of PP fields generated from saving the given cube(s) From 6efa88997a19c4afde8c4afc1393b6bcb78225b2 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Mon, 28 Oct 2024 12:51:29 +0100 Subject: [PATCH 06/74] Specify meta in dask.array.map_blocks (#5989) * Specify meta in da.map_blocks * Improve tests * Add more tests * Add whatsnew * Specify dtype in map_complete_blocks and undo change to lazy_elementwise --------- Co-authored-by: Patrick Peglar --- docs/src/whatsnew/latest.rst | 3 + lib/iris/_lazy_data.py | 12 +++- lib/iris/analysis/__init__.py | 7 +- lib/iris/analysis/_area_weighted.py | 13 +++- lib/iris/analysis/_regrid.py | 13 +++- lib/iris/mesh/utils.py | 5 +- .../regrid/test_RectilinearRegridder.py | 13 ++++ .../tests/unit/analysis/test_PERCENTILE.py | 10 +-- .../lazy_data/test_map_complete_blocks.py | 64 +++++++++++++++++-- 9 files changed, 116 insertions(+), 24 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index c862ef192e..35b101ab3d 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -62,6 +62,9 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ enabled partial collapse of multi-dimensional string coordinates, fixing :issue:`3653`. (:pull:`5955`) +#. `@bouweandela`_ made further updates to the ``chunktype`` of Dask arrays, + so it corresponds better with the array content. (:pull:`5989`) + #. `@ukmo-ccbunney`_ improved error handling for malformed `cell_method` attribute. Also made cell_method string parsing more lenient w.r.t. whitespace. (:pull:`6083`) diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index cd093b315c..a3dfa1edb4 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -537,11 +537,12 @@ def lazy_elementwise(lazy_array, elementwise_op): # call may cast to float, or not, depending on unit equality : Thus, it's # much safer to get udunits to decide that for us. 
dtype = elementwise_op(np.zeros(1, lazy_array.dtype)).dtype + meta = da.utils.meta_from_array(lazy_array).astype(dtype) - return da.map_blocks(elementwise_op, lazy_array, dtype=dtype) + return da.map_blocks(elementwise_op, lazy_array, dtype=dtype, meta=meta) -def map_complete_blocks(src, func, dims, out_sizes, *args, **kwargs): +def map_complete_blocks(src, func, dims, out_sizes, dtype, *args, **kwargs): """Apply a function to complete blocks. Complete means that the data is not chunked along the chosen dimensions. @@ -557,6 +558,8 @@ def map_complete_blocks(src, func, dims, out_sizes, *args, **kwargs): Dimensions that cannot be chunked. out_sizes : tuple of int Output size of dimensions that cannot be chunked. + dtype : + Output dtype. *args : tuple Additional arguments to pass to `func`. **kwargs : dict @@ -596,8 +599,11 @@ def map_complete_blocks(src, func, dims, out_sizes, *args, **kwargs): for dim, size in zip(dims, out_sizes): out_chunks[dim] = size + # Assume operation preserves mask. + meta = da.utils.meta_from_array(data).astype(dtype) + result = data.map_blocks( - func, *args, chunks=out_chunks, dtype=src.dtype, **kwargs + func, *args, chunks=out_chunks, meta=meta, dtype=dtype, **kwargs ) return result diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 215d6dff0a..2c890ef8cc 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1390,9 +1390,10 @@ def _percentile(data, percent, fast_percentile_method=False, **kwargs): result = iris._lazy_data.map_complete_blocks( data, - _calc_percentile, - (-1,), - percent.shape, + func=_calc_percentile, + dims=(-1,), + out_sizes=percent.shape, + dtype=np.float64, percent=percent, fast_percentile_method=fast_percentile_method, **kwargs, diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index a25a21bb47..3ed4f8aa33 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -392,11 +392,18 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( tgt_shape = (len(grid_y.points), len(grid_x.points)) + # Specify the output dtype + if np.issubdtype(src_cube.dtype, np.integer): + out_dtype = np.float64 + else: + out_dtype = src_cube.dtype + new_data = map_complete_blocks( src_cube, - _regrid_along_dims, - (src_y_dim, src_x_dim), - meshgrid_x.shape, + func=_regrid_along_dims, + dims=(src_y_dim, src_x_dim), + out_sizes=meshgrid_x.shape, + dtype=out_dtype, x_dim=src_x_dim, y_dim=src_y_dim, weights=weights, diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 0f375e69f4..fd56eb04a1 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -943,11 +943,18 @@ def __call__(self, src): x_dim = src.coord_dims(src_x_coord)[0] y_dim = src.coord_dims(src_y_coord)[0] + # Specify the output dtype + if self._method == "linear" and np.issubdtype(src.dtype, np.integer): + out_dtype = np.float64 + else: + out_dtype = src.dtype + data = map_complete_blocks( src, - self._regrid, - (y_dim, x_dim), - sample_grid_x.shape, + func=self._regrid, + dims=(y_dim, x_dim), + out_sizes=sample_grid_x.shape, + dtype=out_dtype, x_dim=x_dim, y_dim=y_dim, src_x_coord=src_x_coord, diff --git a/lib/iris/mesh/utils.py b/lib/iris/mesh/utils.py index d054cfec4f..3930fa3f1b 100644 --- a/lib/iris/mesh/utils.py +++ b/lib/iris/mesh/utils.py @@ -277,6 +277,9 @@ def fill_region(target, regiondata, regioninds): # Notes on resultant calculation properties: # 1. 
map_blocks is chunk-mapped, so it is parallelisable and space-saving # 2. However, fetching less than a whole chunk is not efficient + meta = np.ma.array( + np.empty((0,) * result_array.ndim, dtype=result_array.dtype), mask=True + ) for cube in submesh_cubes: # Lazy data array from the region cube sub_data = cube.lazy_data() @@ -300,7 +303,7 @@ def fill_region(target, regiondata, regioninds): sub_data, indarr, dtype=result_array.dtype, - meta=np.ndarray, + meta=meta, ) # Construct the result cube diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py index 284d52d3f9..3f841b938a 100644 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py @@ -474,12 +474,25 @@ def setUp(self): self.args = ("linear", "mask") self.regridder = Regridder(self.cube, self.cube, *self.args) self.lazy_cube = self.cube.copy(da.asarray(self.cube.data)) + self.lazy_masked_cube = self.lazy_cube.copy(da.ma.masked_array(self.cube.data)) self.lazy_regridder = Regridder(self.lazy_cube, self.lazy_cube, *self.args) def test_lazy_regrid(self): result = self.lazy_regridder(self.lazy_cube) self.assertTrue(result.has_lazy_data()) + meta = da.utils.meta_from_array(result.core_data()) + self.assertTrue(meta.__class__ is np.ndarray) expected = self.regridder(self.cube) + self.assertEqual(result.dtype, expected.dtype) + self.assertTrue(result == expected) + + def test_lazy_masked_regrid(self): + result = self.lazy_regridder(self.lazy_masked_cube) + self.assertTrue(result.has_lazy_data()) + meta = da.utils.meta_from_array(result.core_data()) + self.assertTrue(isinstance(meta, np.ma.MaskedArray)) + expected = self.regridder(self.cube) + self.assertEqual(result.dtype, expected.dtype) self.assertTrue(result == expected) diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index 0d759c621f..72218af830 100644 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -155,10 +155,10 @@ def test_default_kwargs_passed(self, mocked_mquantiles): if self.lazy: data = as_lazy_data(data) - self.agg_method(data, axis=axis, percent=percent) + result = self.agg_method(data, axis=axis, percent=percent) # Trigger calculation for lazy case. - as_concrete_data(data) + as_concrete_data(result) for key in ["alphap", "betap"]: self.assertEqual(mocked_mquantiles.call_args.kwargs[key], 1) @@ -170,10 +170,12 @@ def test_chosen_kwargs_passed(self, mocked_mquantiles): if self.lazy: data = as_lazy_data(data) - self.agg_method(data, axis=axis, percent=percent, alphap=0.6, betap=0.5) + result = self.agg_method( + data, axis=axis, percent=percent, alphap=0.6, betap=0.5 + ) # Trigger calculation for lazy case. 
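The point of the reworked test fixture above (a func that raises on size-0 input) is that dask can no longer probe the function with an empty array to infer the output type, so the meta passed through map_blocks has to be correct. A standalone sketch of the behaviour being relied on, using a toy masked array rather than the Iris wrappers:

    import dask.array as da
    import numpy as np

    def func(chunk):
        # Cannot be "sampled": refuses the zero-size probe array that dask
        # would otherwise use to infer the output type.
        if chunk.size == 0:
            raise ValueError("cannot sample this function")
        return chunk + 1

    x = da.ma.masked_array(np.arange(4), mask=[0, 1, 0, 0])
    # Passing meta explicitly skips the empty-array probe and preserves the
    # masked-array chunk type in the result.
    meta = da.utils.meta_from_array(x).astype(x.dtype)
    y = da.map_blocks(func, x, dtype=x.dtype, meta=meta)
    print(y.compute())  # [1 -- 3 4]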
- as_concrete_data(data) + as_concrete_data(result) for key, val in zip(["alphap", "betap"], [0.6, 0.5]): self.assertEqual(mocked_mquantiles.call_args.kwargs[key], val) diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py index 7403a5611e..7d619353ed 100644 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py @@ -32,13 +32,27 @@ def create_mock_cube(array): class Test_map_complete_blocks(tests.IrisTest): def setUp(self): self.array = np.arange(8).reshape(2, 4) - self.func = lambda chunk: chunk + 1 + + def func(chunk): + """Use a function that cannot be 'sampled'. + + To make sure the call to map_blocks is correct for any function, + we define this function that cannot be called with size 0 arrays + to infer the output meta. + """ + if chunk.size == 0: + raise ValueError + return chunk + 1 + + self.func = func self.func_result = self.array + 1 def test_non_lazy_input(self): # Check that a non-lazy input doesn't trip up the functionality. cube, cube_data = create_mock_cube(self.array) - result = map_complete_blocks(cube, self.func, dims=(1,), out_sizes=(4,)) + result = map_complete_blocks( + cube, self.func, dims=(1,), out_sizes=(4,), dtype=self.array.dtype + ) self.assertFalse(is_lazy_data(result)) self.assertArrayEqual(result, self.func_result) # check correct data was accessed @@ -48,7 +62,9 @@ def test_non_lazy_input(self): def test_lazy_input(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) cube, cube_data = create_mock_cube(lazy_array) - result = map_complete_blocks(cube, self.func, dims=(1,), out_sizes=(4,)) + result = map_complete_blocks( + cube, self.func, dims=(1,), out_sizes=(4,), dtype=lazy_array.dtype + ) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), self.func_result) # check correct data was accessed @@ -57,14 +73,44 @@ def test_lazy_input(self): def test_dask_array_input(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) - result = map_complete_blocks(lazy_array, self.func, dims=(1,), out_sizes=(4,)) + result = map_complete_blocks( + lazy_array, self.func, dims=(1,), out_sizes=(4,), dtype=lazy_array.dtype + ) + self.assertTrue(is_lazy_data(result)) + self.assertArrayEqual(result.compute(), self.func_result) + + def test_dask_masked_array_input(self): + array = da.ma.masked_array(np.arange(2), mask=np.arange(2)) + result = map_complete_blocks( + array, self.func, dims=tuple(), out_sizes=tuple(), dtype=array.dtype + ) self.assertTrue(is_lazy_data(result)) + self.assertTrue(isinstance(da.utils.meta_from_array(result), np.ma.MaskedArray)) + self.assertArrayEqual(result.compute(), np.ma.masked_array([1, 2], mask=[0, 1])) + + def test_dask_array_input_with_different_output_dtype(self): + lazy_array = da.ma.masked_array(self.array, chunks=((1, 1), (4,))) + dtype = np.float32 + + def func(chunk): + if chunk.size == 0: + raise ValueError + return (chunk + 1).astype(np.float32) + + result = map_complete_blocks( + lazy_array, func, dims=(1,), out_sizes=(4,), dtype=dtype + ) + self.assertTrue(isinstance(da.utils.meta_from_array(result), np.ma.MaskedArray)) + self.assertTrue(result.dtype == dtype) + self.assertTrue(result.compute().dtype == dtype) self.assertArrayEqual(result.compute(), self.func_result) def test_rechunk(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (2, 2))) cube, _ = create_mock_cube(lazy_array) - result = map_complete_blocks(cube, self.func, 
dims=(1,), out_sizes=(4,)) + result = map_complete_blocks( + cube, self.func, dims=(1,), out_sizes=(4,), dtype=lazy_array.dtype + ) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), self.func_result) @@ -76,7 +122,9 @@ def func(_): return np.arange(2).reshape(1, 2) func_result = [[0, 1], [0, 1]] - result = map_complete_blocks(cube, func, dims=(1,), out_sizes=(2,)) + result = map_complete_blocks( + cube, func, dims=(1,), out_sizes=(2,), dtype=lazy_array.dtype + ) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), func_result) @@ -84,7 +132,9 @@ def test_multidimensional_input(self): array = np.arange(2 * 3 * 4).reshape(2, 3, 4) lazy_array = da.asarray(array, chunks=((1, 1), (1, 2), (4,))) cube, _ = create_mock_cube(lazy_array) - result = map_complete_blocks(cube, self.func, dims=(1, 2), out_sizes=(3, 4)) + result = map_complete_blocks( + cube, self.func, dims=(1, 2), out_sizes=(3, 4), dtype=lazy_array.dtype + ) self.assertTrue(is_lazy_data(result)) self.assertArrayEqual(result.compute(), array + 1) From 3d4df1248b81c7f69d2ad12a06caad7dde1efa13 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Mon, 28 Oct 2024 13:44:52 +0000 Subject: [PATCH 07/74] Update CF standard names table. (#6200) * Update CF standard names table. * ADDED WHATSNEW --- docs/src/whatsnew/latest.rst | 3 + etc/cf-standard-name-table.xml | 3660 +++++++++++++++++--------------- 2 files changed, 1948 insertions(+), 1715 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 35b101ab3d..6700774480 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -56,6 +56,9 @@ This document explains the changes made to Iris for this release This allows the user to choose whether or not surface fields are recognised and handled appropriately. (:issue:`3280`, :pull:`5734`) +#. `@ESadek-MO`_ updated to the latest CF Standard Names Table v86 + (5 September 2024). (:pull:`6200`) + 🐛 Bugs Fixed ============= diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index c5405e2dca..f08d927ad7 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,11 +1,12 @@ - - 85 - CF-StandardNameTable-85 - 2024-05-21T15:55:10Z - 2024-05-21T15:55:10Z + + 86 + CF-StandardNameTable-86 + 2024-09-05T10:52:50Z + 2024-09-05T10:52:50Z Centre for Environmental Data Analysis support@ceda.ac.uk + 1 @@ -14,6 +15,41 @@ Acoustic area backscattering strength is 10 times the log10 of the ratio of the area backscattering coefficient to the reference value, 1 (m2 m-2). Area backscattering coefficient is the integral of the volume backscattering coefficient over a defined distance. Volume backscattering coefficient is the linear form of acoustic_volume_backscattering_strength_in_sea_water. For further details see MacLennan et. al (2002) doi:10.1006/jmsc.2001.1158. + + m + + + Acoustic centre of mass is the average of all sampled depths weighted by their volume backscattering coefficient. Volume backscattering coefficient is the linear form of acoustic_volume_backscattering_strength_in_sea_water. For further details see Urmy et. al (2012) doi:10.1093/icesjms/fsr205. + + + + m + + + Acoustic equivalent area is the squared area backscattering coefficient divided by the depth integral of squared volume backscattering coefficient. Area backscattering coefficient is the integral of the volume backscattering coefficient over a defined distance. 
Volume backscattering coefficient is the linear form of acoustic_volume_backscattering_strength_in_sea_water. The parameter is computed to provide a value that represents the area that would be occupied if all data cells contained the mean density and is the reciprocal of acoustic_index_of_aggregation_in_sea_water. For further details see Urmy et. al (2012) doi:10.1093/icesjms/fsr205 and Woillez et. al (2007) doi.org/10.1093/icesjms/fsm025. + + + + m-1 + + + Acoustic index of aggregation is the depth integral of squared volume backscattering coefficient divided by the squared area backscattering coefficient. Volume backscattering coefficient is the linear form of acoustic_volume_backscattering_strength_in_sea_water. Area backscattering coefficient is the integral of the volume backscattering coefficient over a defined distance. The parameter is computed to provide a value that represents the patchiness of biomass in the water column in the field of fisheries acoustics - the value is high when small areas are much denser than the rest of the distribution. The parameter is also the reciprocal of acoustic_equivalent_area_in_sea_water. For further details see Urmy et. al (2012) doi:10.1093/icesjms/fsr205 and Woillez et. al (2007) doi.org/10.1093/icesjms/fsm025. + + + + m-2 + + + Acoustic inertia is the sum of squared distances from the acoustic_centre_of_mass weighted by the volume backscattering coefficient at each distance and normalized by the total area backscattering coefficient. Volume backscattering coefficient is the linear form of acoustic_volume_backscattering_strength_in_sea_water. Area backscattering coefficient is the integral of the volume backscattering coefficient over a defined distance. For further details see Urmy et. al (2012) doi:10.1093/icesjms/fsr205 and Bez and Rivoirard (2001) doi:10.1016/S0165-7836(00)00241-1. + + + + 1 + + + Acoustic proportion occupied is occupied volume divided by the volume sampled. Occupied volume is the integral of the ratio of acoustic_volume_backscattering_strength_in_sea_water above -90 dB to the reference value, 1 m2 m-2. For further details see Urmy et. al (2012) doi:10.1093/icesjms/fsr205. + + s @@ -74,7 +110,7 @@ day - "Age of surface snow" means the length of time elapsed since the snow accumulated on the earth's surface. Surface snow refers to the snow on the solid ground or on surface ice cover, but excludes, for example, falling snowflakes and snow on plants. + "Age of surface snow" means the length of time elapsed since the snow accumulated on the earth's surface. Surface snow refers to the snow on the solid ground or on surface ice cover, but excludes, for example, falling snowflakes and snow on plants. @@ -249,7 +285,7 @@ m - The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. To apply the altimeter range correction it must be added to the quantity with standard name altimeter_range. "Correction_due_to_ionosphere" means a correction for the atmosphere's electron content in the ionosphere. Additional altimeter range corrections are given by the quantities with standard names altimeter_range_correction_due_to_wet_troposphere, altimeter_range_correction_due_to_dry_troposphere, sea_surface_height_correction_due_to_air_pressure_at_low_frequency and sea_surface_height_correction_due_to_air_pressure_and_wind_at_high_frequency. 
+ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. To apply the altimeter range correction it must be added to the quantity with standard name altimeter_range. "Correction_due_to_ionosphere" means a correction for the atmosphere's electron content in the ionosphere. Additional altimeter range corrections are given by the quantities with standard names altimeter_range_correction_due_to_wet_troposphere, altimeter_range_correction_due_to_dry_troposphere, sea_surface_height_correction_due_to_air_pressure_at_low_frequency and sea_surface_height_correction_due_to_air_pressure_and_wind_at_high_frequency. @@ -368,35 +404,35 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. To specify which area is quantified by a variable with standard name area_fraction, provide a coordinate variable or scalar coordinate variable with standard name area_type. Alternatively, if one is defined, use a more specific standard name of X_area_fraction for the fraction of horizontal area occupied by X. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. To specify which area is quantified by a variable with standard name area_fraction, provide a coordinate variable or scalar coordinate variable with standard name area_type. Alternatively, if one is defined, use a more specific standard name of X_area_fraction for the fraction of horizontal area occupied by X. 1 psbg - The quantity with standard name area_fraction_below_surface is the fraction of horizontal area where a given isobaric surface is below the (ground or sea) surface. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The surface called "surface" means the lower boundary of the atmosphere. + The quantity with standard name area_fraction_below_surface is the fraction of horizontal area where a given isobaric surface is below the (ground or sea) surface. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The surface called "surface" means the lower boundary of the atmosphere. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. 
It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A coordinate variable of solar_zenith_angle indicating the day extent should be specified. Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A coordinate variable of solar_zenith_angle indicating the day extent should be specified. Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A coordinate variable of solar_zenith_angle indicating the day extent should be specified. Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A coordinate variable of solar_zenith_angle indicating the day extent should be specified. Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A coordinate variable of solar_zenith_angle indicating the day extent should be specified. Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A coordinate variable of solar_zenith_angle indicating the day extent should be specified. Solar zenith angle is the the angle between the line of sight to the sun and the local vertical. @@ -1117,7 +1153,7 @@ kg m-2 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The chemical formula for the hydroperoxyl radical is HO2. In chemistry, a 'radical' is a highly reactive, and therefore short lived, species. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The chemical formula for the hydroperoxyl radical is HO2. In chemistry, a 'radical' is a highly reactive, and therefore short lived, species. @@ -1292,14 +1328,14 @@ kg m-2 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. kg m-2 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. 
"Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. @@ -1894,7 +1930,7 @@ mol - The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. @@ -2090,14 +2126,14 @@ mol - The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. mol - The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "Noy" describes a family of chemical species. 
The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2) , chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. @@ -2419,28 +2455,28 @@ s-1 41 - Atmosphere upward absolute vorticity is the sum of the atmosphere upward relative vorticity and the vertical component of vorticity due to the Earth’s rotation. In contrast, the quantity with standard name atmosphere_upward_relative_vorticity excludes the Earth's rotation. Vorticity is a vector quantity. "Upward" indicates a vector component which is positive when directed upward (negative downward). A positive value of atmosphere_upward_absolute_vorticity indicates anticlockwise rotation when viewed from above. + Atmosphere upward absolute vorticity is the sum of the atmosphere upward relative vorticity and the vertical component of vorticity due to the Earth’s rotation. In contrast, the quantity with standard name atmosphere_upward_relative_vorticity excludes the Earth's rotation. Vorticity is a vector quantity. "Upward" indicates a vector component which is positive when directed upward (negative downward). A positive value of atmosphere_upward_absolute_vorticity indicates anticlockwise rotation when viewed from above. s-1 43 E138 - Atmosphere upward relative vorticity is the vertical component of the 3D air vorticity vector. The vertical component arises from horizontal velocity only. "Relative" in this context means the vorticity of the air relative to the rotating solid earth reference frame, i.e. excluding the Earth's own rotation. In contrast, the quantity with standard name atmosphere_upward_absolute_vorticity includes the Earth's rotation. 
"Upward" indicates a vector component which is positive when directed upward (negative downward). A positive value of atmosphere_upward_relative_vorticity indicates anticlockwise rotation when viewed from above. + Atmosphere upward relative vorticity is the vertical component of the 3D air vorticity vector. The vertical component arises from horizontal velocity only. "Relative" in this context means the vorticity of the air relative to the rotating solid earth reference frame, i.e. excluding the Earth's own rotation. In contrast, the quantity with standard name atmosphere_upward_absolute_vorticity includes the Earth's rotation. "Upward" indicates a vector component which is positive when directed upward (negative downward). A positive value of atmosphere_upward_relative_vorticity indicates anticlockwise rotation when viewed from above. s-1 - Atmosphere x relative vorticity is the x component of the 3D air vorticity vector. "Relative" in this context means the vorticity of the air relative to the rotating solid earth reference frame, i.e. excluding the Earth's own rotation. "x" indicates a vector component along the grid x-axis, positive with increasing x. A positive value of atmosphere_x_relative_vorticity indicates anticlockwise rotation when viewed by an observer looking along the axis in the direction of decreasing x, i.e. consistent with the "right hand screw" rule. + Atmosphere x relative vorticity is the x component of the 3D air vorticity vector. "Relative" in this context means the vorticity of the air relative to the rotating solid earth reference frame, i.e. excluding the Earth's own rotation. "x" indicates a vector component along the grid x-axis, positive with increasing x. A positive value of atmosphere_x_relative_vorticity indicates anticlockwise rotation when viewed by an observer looking along the axis in the direction of decreasing x, i.e. consistent with the "right hand screw" rule. s-1 - Atmosphere y relative vorticity is the y component of the 3D air vorticity vector. "Relative" in this context means the vorticity of the air relative to the rotating solid earth reference frame, i.e. excluding the Earth's own rotation. "y" indicates a vector component along the grid y-axis, positive with increasing y. A positive value of atmosphere_y_relative_vorticity indicates anticlockwise rotation when viewed by an observer looking along the axis in the direction of decreasing y, i.e. consistent with the "right hand screw" rule. + Atmosphere y relative vorticity is the y component of the 3D air vorticity vector. "Relative" in this context means the vorticity of the air relative to the rotating solid earth reference frame, i.e. excluding the Earth's own rotation. "y" indicates a vector component along the grid y-axis, positive with increasing y. A positive value of atmosphere_y_relative_vorticity indicates anticlockwise rotation when viewed by an observer looking along the axis in the direction of decreasing y, i.e. consistent with the "right hand screw" rule. @@ -2454,7 +2490,7 @@ - The Automated Tropical Cyclone Forecasting System (ATCF) storm identifier is an 8 character string which identifies a tropical cyclone. 
The storm identifier has the form BBCCYYYY, where BB is the ocean basin, specifically: AL - North Atlantic basin, north of the Equator; SL - South Atlantic basin, south of the Equator; EP - North East Pacific basin, eastward of 140 degrees west longitude; CP - North Central Pacific basin, between the dateline and 140 degrees west longitude; WP - North West Pacific basin, westward of the dateline; IO - North Indian Ocean basin, north of the Equator between 40 and 100 degrees east longitude; SH - South Pacific Ocean basin and South Indian Ocean basin. CC is the cyclone number. Numbers 01 through 49 are reserved for tropical and subtropical cyclones. A cyclone number is assigned to each tropical or subtropical cyclone in each basin as it develops. Numbers are assigned in chronological order. Numbers 50 through 79 are reserved for internal use by operational forecast centers. Numbers 80 through 89 are reserved for training, exercises and testing. Numbers 90 through 99 are reserved for tropical disturbances having the potential to become tropical or subtropical cyclones. The 90's are assigned sequentially and reused throughout the calendar year. YYYY is the four-digit year. This is calendar year for the northern hemisphere. For the southern hemisphere, the year begins July 1, with calendar year plus one. Reference: Miller, R.J., Schrader, A.J., Sampson, C.R., & Tsui, T.L. (1990), The Automated Tropical Cyclone Forecasting System (ATCF), American Meteorological Society Computer Techniques, 5, 653 - 660. + The Automated Tropical Cyclone Forecasting System (ATCF) storm identifier is an 8 character string which identifies a tropical cyclone. The storm identifier has the form BBCCYYYY, where BB is the ocean basin, specifically: AL - North Atlantic basin, north of the Equator; SL - South Atlantic basin, south of the Equator; EP - North East Pacific basin, eastward of 140 degrees west longitude; CP - North Central Pacific basin, between the dateline and 140 degrees west longitude; WP - North West Pacific basin, westward of the dateline; IO - North Indian Ocean basin, north of the Equator between 40 and 100 degrees east longitude; SH - South Pacific Ocean basin and South Indian Ocean basin. CC is the cyclone number. Numbers 01 through 49 are reserved for tropical and subtropical cyclones. A cyclone number is assigned to each tropical or subtropical cyclone in each basin as it develops. Numbers are assigned in chronological order. Numbers 50 through 79 are reserved for internal use by operational forecast centers. Numbers 80 through 89 are reserved for training, exercises and testing. Numbers 90 through 99 are reserved for tropical disturbances having the potential to become tropical or subtropical cyclones. The 90's are assigned sequentially and reused throughout the calendar year. YYYY is the four-digit year. This is calendar year for the northern hemisphere. For the southern hemisphere, the year begins July 1, with calendar year plus one. Reference: Miller, R.J., Schrader, A.J., Sampson, C.R., & Tsui, T.L. (1990), The Automated Tropical Cyclone Forecasting System (ATCF), American Meteorological Society Computer Techniques, 5, 653 - 660. @@ -2629,7 +2665,7 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Burned area" means the area of burned vegetation. 
+ "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Burned area" means the area of burned vegetation. @@ -2643,7 +2679,7 @@ 1 - Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. "Canopy" means the vegetative covering over a surface. The canopy is often considered to be the outer surfaces of the vegetation. Plant height and the distribution, orientation and shape of plant leaves within a canopy influence the atmospheric environment and many plant processes within the canopy. Reference: AMS Glossary http://glossary.ametsoc.org/wiki/Canopy. The surface_albedo restricted to the area type "vegetation" is related to canopy_albedo, but the former also includes the effect of radiation being reflected from the ground underneath the canopy. + Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. "Canopy" means the vegetative covering over a surface. The canopy is often considered to be the outer surfaces of the vegetation. Plant height and the distribution, orientation and shape of plant leaves within a canopy influence the atmospheric environment and many plant processes within the canopy. Reference: AMS Glossary http://glossary.ametsoc.org/wiki/Canopy. The surface_albedo restricted to the area type "vegetation" is related to canopy_albedo, but the former also includes the effect of radiation being reflected from the ground underneath the canopy. @@ -2779,6 +2815,27 @@ Zero change in land ice mass is an arbitrary level. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. The horizontal domain over which the quantity is calculated is described by the associated coordinate variables and coordinate bounds or by a coordinate variable or scalar coordinate variable with the standard name of "region" supplied according to section 6.1.1 of the CF conventions. + + m + + + The change in local mean sea level relative to the local solid surface, i.e. sea floor. The abbreviation "wrt" means "with respect to". A positive value means sea level rise. + + + + m + + + Sea surface height is a time-varying quantity. A reference ellipsoid is a regular mathematical figure that approximates the irregular shape of the geoid. A number of reference ellipsoids are defined for use in the field of geodesy. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Tides are a significant contributor to the observed sea surface height. The load tidal component of sea surface height describes the variability of the sea surface due to the deformation of the Earth because of the weight of the water masses displaced by ocean tides. + + + + m + + + Sea surface height is a time-varying quantity. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + + kg m-2 @@ -2895,7 +2952,7 @@ 1e-3 - "change_over_time_in_X" means change in a quantity X over a time-interval, which should be defined by the bounds of the time coordinate. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "change_over_time_in_X" means change in a quantity X over a time-interval, which should be defined by the bounds of the time coordinate. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. 
Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. @@ -2944,7 +3001,7 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The clear_sky area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. "Clear sky" means in the absence of clouds. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The clear_sky area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. "Clear sky" means in the absence of clouds. @@ -2958,21 +3015,21 @@ 1 - The albedo of cloud. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. + The albedo of cloud. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. 1 71 E164 clt - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. 
It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. Cloud area fraction is also called "cloud amount" and "cloud cover". 1 cl - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Standard names also exist for high, medium and low cloud types. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Standard names also exist for high, medium and low cloud types. Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -3063,14 +3120,14 @@ 1 72 E185 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. Convective cloud is that produced by the convection schemes in an atmosphere model. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. 
It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. Convective cloud is that produced by the convection schemes in an atmosphere model. Cloud area fraction is also called "cloud amount" and "cloud cover". 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Standard names also exist for high, medium and low cloud types. Convective cloud is that produced by the convection schemes in an atmosphere model. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Standard names also exist for high, medium and low cloud types. Convective cloud is that produced by the convection schemes in an atmosphere model. Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -3112,21 +3169,21 @@ kg m-2 63 - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. Convective precipitation is that produced by the convection schemes in an atmosphere model. + "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. Convective precipitation is that produced by the convection schemes in an atmosphere model. 
kg m-2 s-1 prc - Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. m s-1 - "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. + "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. @@ -3168,7 +3225,7 @@ s-1 - The Coriolis parameter is twice the component of the earth's angular velocity about the local vertical i.e. 2 W sin L, where L is latitude and W the angular speed of the earth. + The Coriolis parameter is twice the component of the earth's angular velocity about the local vertical i.e. 2 W sin L, where L is latitude and W the angular speed of the earth. @@ -3238,7 +3295,7 @@ m - Depth is the vertical distance below the surface. 'Mole concentration' means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The concentration of any chemical species, whether particulate or dissolved, may vary with depth in the ocean. A depth profile may go through one or more local minima in concentration. The depth_at_shallowest_local_minimum_in_vertical_profile_of_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water is the depth of the local minimum in the oxygen concentration that occurs closest to the sea surface. + Depth is the vertical distance below the surface. 'Mole concentration' means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The concentration of any chemical species, whether particulate or dissolved, may vary with depth in the ocean. A depth profile may go through one or more local minima in concentration. The depth_at_shallowest_local_minimum_in_vertical_profile_of_mole_concentration_of_dissolved_molecular_oxygen_in_sea_water is the depth of the local minimum in the oxygen concentration that occurs closest to the sea surface. @@ -3364,7 +3421,7 @@ degrees - The phrase "direction_of_X" means direction of a vector, a bearing. "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. 
A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. In that case, "displacement" is also the distance across the earth's surface calculated from the change in a moving object's geospatial position between the start and end of the time interval associated with the displacement variable. The "direction of displacement" is the angle between due north and the displacement vector. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + The phrase "direction_of_X" means direction of a vector, a bearing. "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. In that case, "displacement" is also the distance across the earth's surface calculated from the change in a moving object's geospatial position between the start and end of the time interval associated with the displacement variable. The "direction of displacement" is the angle between due north and the displacement vector. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -3378,7 +3435,7 @@ m - A measure of distance from the Earth's geocenter, commonly used in satellite tracks. + A measure of distance from the Earth's geocenter, commonly used in satellite tracks. @@ -3539,14 +3596,14 @@ Pa - "Downward" indicates a vector component which is positive when directed downward (negative upward). "x" indicates a vector component along the grid x-axis, positive with increasing x. A downward x stress is a downward flux of momentum towards the positive direction of the model's x-axis. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. + "Downward" indicates a vector component which is positive when directed downward (negative upward). "x" indicates a vector component along the grid x-axis, positive with increasing x. A downward x stress is a downward flux of momentum towards the positive direction of the model's x-axis. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. Pa - "Downward" indicates a vector component which is positive when directed downward (negative upward). "x" indicates a vector component along the grid x-axis, positive with increasing x. 
A downward x stress is a downward flux of momentum towards the positive direction of the model's x-axis. A positive correction is downward i.e. added to the ocean. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. + "Downward" indicates a vector component which is positive when directed downward (negative upward). "x" indicates a vector component along the grid x-axis, positive with increasing x. A downward x stress is a downward flux of momentum towards the positive direction of the model's x-axis. A positive correction is downward i.e. added to the ocean. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. @@ -3560,14 +3617,14 @@ Pa - "Downward" indicates a vector component which is positive when directed downward (negative upward). "y" indicates a vector component along the grid y-axis, positive with increasing y. A downward y stress is a downward flux of momentum towards the positive direction of the model's y-axis. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. + "Downward" indicates a vector component which is positive when directed downward (negative upward). "y" indicates a vector component along the grid y-axis, positive with increasing y. A downward y stress is a downward flux of momentum towards the positive direction of the model's y-axis. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. Pa - "Downward" indicates a vector component which is positive when directed downward (negative upward). "y" indicates a vector component along the grid y-axis, positive with increasing y. A downward y stress is a downward flux of momentum towards the positive direction of the model's y-axis. A positive correction is downward i.e. added to the ocean. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. + "Downward" indicates a vector component which is positive when directed downward (negative upward). "y" indicates a vector component along the grid y-axis, positive with increasing y. A downward y stress is a downward flux of momentum towards the positive direction of the model's y-axis. A positive correction is downward i.e. added to the ocean. The phrase "sea water surface" means the upper boundary of the liquid portion of an ocean or sea, including the boundary to floating ice if present. @@ -3588,14 +3645,14 @@ W/m2 - Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. 
"Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + Downwelling radiation is radiation from above. It does not mean "net downward". 
The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -3756,14 +3813,14 @@ W/m2 - Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 
A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. 
By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -3966,7 +4023,7 @@ m - "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. An eastward displacement is the distance calculated from the change in a moving object's longitude between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "Eastward" indicates a vector component which is positive when directed eastward (negative westward). "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. An eastward displacement is the distance calculated from the change in a moving object's longitude between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -4267,7 +4324,7 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A "floating ice shelf", sometimes called a "floating ice sheet", indicates where an ice sheet extending from a land area flows over sea water. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. 
It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A "floating ice shelf", sometimes called a "floating ice sheet", indicates where an ice sheet extending from a land area flows over sea water. @@ -4302,7 +4359,7 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Fog means water droplets or minute ice crystals close to the surface which reduce visibility in air to less than 1000m. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Fog means water droplets or minute ice crystals close to the surface which reduce visibility in air to less than 1000m. @@ -4337,7 +4394,7 @@ 1 - "Fraction of time" is the fraction of a time period defined by the bounds of the time coordinate variable for which a characteristic of interest exists. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. Sea ice area fraction is area of the sea surface occupied by sea ice. The area threshold value must be specified by supplying a coordinate variable or scalar coordinate variable with the standard name of sea_ice_area_fraction. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "Fraction of time" is the fraction of a time period defined by the bounds of the time coordinate variable for which a characteristic of interest exists. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Sea ice area fraction is area of the sea surface occupied by sea ice. The area threshold value must be specified by supplying a coordinate variable or scalar coordinate variable with the standard name of sea_ice_area_fraction. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -4466,6 +4523,13 @@ Global average sea level change is due to change in volume of the water in the ocean, caused by mass and/or density change, or to change in the volume of the ocean basins, caused by tectonics etc. It is sometimes called "eustatic", which is a term that also has other definitions. 
It differs from the change in the global average sea surface height relative to the centre of the Earth by the global average vertical movement of the ocean floor. Zero sea level change is an arbitrary level. Because global average sea level change quantifies the change in volume of the world ocean, it is not calculated necessarily by considering local changes in mean sea level. + + m + + + Global average mass volume sea level change is caused by water mass balance (evaporation – precipitation + runoff). This in turn results in a change in volume of the world ocean. Zero sea level change is an arbitrary level. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Because global average sea level change quantifies the change in volume of the world ocean, it is not calculated necessarily by considering local changes in mean sea level. This quantity is sometimes called "barystatic sea level rise" or "barystatic sea level change". It is the part of global-mean sea-level rise which is due to the addition to the ocean of water mass that formerly resided within the land area (as land water storage or land ice) or in the atmosphere (which contains a relatively tiny mass of water). + + m @@ -4568,14 +4632,14 @@ s-1 - The "gross rate of decrease in area fraction" is the fraction of a grid cell that transitions from a given area type per unit time, for example, as a result of land use changes. The quantity described by this standard name is a gross decrease because it includes only land where the use transitions away from the given area type and excludes land that transitions to that area type during the same period. The area type should be specified using a coordinate of scalar coordinate variable with standard name area_type. There is also a standard name for gross_rate_of_increase_in_area_fraction. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area + The "gross rate of decrease in area fraction" is the fraction of a grid cell that transitions from a given area type per unit time, for example, as a result of land use changes. The quantity described by this standard name is a gross decrease because it includes only land where the use transitions away from the given area type and excludes land that transitions to that area type during the same period. The area type should be specified using a coordinate of scalar coordinate variable with standard name area_type. There is also a standard name for gross_rate_of_increase_in_area_fraction. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. s-1 - The "rate of increase in area fraction" is the fraction of a grid cell that transitions to a given area type per unit time, for example, as a result of land use changes. 
The quantity described by this standard name is a gross increase because it includes only land where the use transitions to the given area type and excludes land that transitions away from that area type during the same period. The area type should be specified using a coordinate or scalar coordinate variable with standard name area_type. There is also a standard name for gross_rate_of_decrease_in_area_fraction. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. + The "rate of increase in area fraction" is the fraction of a grid cell that transitions to a given area type per unit time, for example, as a result of land use changes. The quantity described by this standard name is a gross increase because it includes only land where the use transitions to the given area type and excludes land that transitions away from that area type during the same period. The area type should be specified using a coordinate or scalar coordinate variable with standard name area_type. There is also a standard name for gross_rate_of_decrease_in_area_fraction. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. @@ -4589,7 +4653,7 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Grounded ice sheet" indicates where the ice sheet rests over bedrock and is thus grounded. It excludes ice-caps, glaciers and floating ice shelves. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Grounded ice sheet" indicates where the ice sheet rests over bedrock and is thus grounded. It excludes ice-caps, glaciers and floating ice shelves. @@ -4792,7 +4856,7 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. High type clouds are: Cirrus, Cirrostratus, Cirrocumulus. X_type_cloud_area_fraction is generally determined on the basis of cloud type, though Numerical Weather Prediction (NWP) models often calculate them based on the vertical location of the cloud. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. 
Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. High type clouds are: Cirrus, Cirrostratus, Cirrocumulus. X_type_cloud_area_fraction is generally determined on the basis of cloud type, though Numerical Weather Prediction (NWP) models often calculate them based on the vertical location of the cloud. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -4834,14 +4898,14 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. Cloud area fraction is also called "cloud amount" and "cloud cover". 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be "model_level_number", but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. 
Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be "model_level_number", but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -4890,7 +4954,7 @@ 1e-3 kg m-2 - The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. The phrase "wrt" means "with respect to". The phrase "product_of_X_and_Y" means X*Y. Depth is the vertical distance below the surface. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. 
In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. Practical salinity units are dimensionless. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. Sea water density is the in-situ density (not the potential density). For Boussinesq models, density is the constant Boussinesq reference density, a quantity which has the standard name reference_sea_water_density_for_boussinesq_approximation. + The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. The phrase "wrt" means "with respect to". The phrase "product_of_X_and_Y" means X*Y. Depth is the vertical distance below the surface. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. Practical salinity units are dimensionless. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. Sea water density is the in-situ density (not the potential density). For Boussinesq models, density is the constant Boussinesq reference density, a quantity which has the standard name reference_sea_water_density_for_boussinesq_approximation. 
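As a quick numerical illustration of the salinity-scale conversions quoted in the description above (S_P = (S_K - 0.03) * (1.80655 / 1.805), and S_P = S_C), a minimal Python sketch follows. The function names and the sample value are invented for the example and are not part of the standard name table or of Iris.

# Minimal sketch of the salinity-scale conversions quoted in the description
# above: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C.
# Function names and the sample value are illustrative only.


def knudsen_to_practical_salinity(s_k: float) -> float:
    """Convert Knudsen salinity S_K (1901-1966 observations) to Practical Salinity S_P."""
    return (s_k - 0.03) * (1.80655 / 1.805)


def cox_to_practical_salinity(s_c: float) -> float:
    """Cox salinity S_C (1967-1977 observations) maps directly onto S_P."""
    return s_c


if __name__ == "__main__":
    # A typical open-ocean Knudsen salinity of about 35 parts per thousand.
    print(knudsen_to_practical_salinity(35.0))  # ~35.0: the scales nearly coincide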
@@ -4911,14 +4975,14 @@ mol m-2 s-1 - The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "wrt" means with respect to. Depth is the vertical distance below the surface."tendency_of_X" means derivative of X with respect to time. 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). + The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "wrt" means with respect to. Depth is the vertical distance below the surface. "tendency_of_X" means derivative of X with respect to time. 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). mol m-2 s-1 - The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "wrt" means with respect to. "tendency_of_X" means derivative of X with respect to time. Depth is the vertical distance below the surface. 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. + The phrase "integral_wrt_X_of_Y" means int Y dX. To specify the limits of the integral the data variable should have an axis for X and associated coordinate bounds. If no axis for X is associated with the data variable, or no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is air the integral is assumed to be calculated over the full depth of the atmosphere. "wrt" means with respect to. "tendency_of_X" means derivative of X with respect to time. Depth is the vertical distance below the surface. 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase.
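The "integral_wrt_X_of_Y" construction in the descriptions above can be made concrete with a small numerical sketch: the limits of the integral come from the coordinate bounds, so a depth integral reduces to a thickness-weighted sum over layers. The following is only an illustration under that reading; it assumes numpy is available, and the variable names and numbers are invented for the example.

# Illustrative sketch of "integral_wrt_depth_of_Y" (int Y d(depth)) with the
# limits of the integral taken from the depth coordinate bounds, as described
# above. Not part of the standard name table or of Iris.
import numpy as np

# Concentration Y on three depth layers (mol m-3) and the bounds (m) of each layer.
y = np.array([2.0, 1.5, 0.5])
depth_bounds = np.array([[0.0, 10.0], [10.0, 50.0], [50.0, 200.0]])

# Layer thicknesses come from the bounds; the integral is the thickness-weighted sum.
thickness = depth_bounds[:, 1] - depth_bounds[:, 0]
column_integral = np.sum(y * thickness)  # mol m-2, integrated over 0-200 m only
print(column_integral)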
@@ -7620,7 +7684,7 @@ 1 clisccp - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. The ISCCP cloud area fraction is diagnosed from atmosphere model output by the ISCCP simulator software in such a way as to be comparable with the observational diagnostics of ISCCP (the International Satellite Cloud Climatology Project). Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. The ISCCP cloud area fraction is diagnosed from atmosphere model output by the ISCCP simulator software in such a way as to be comparable with the observational diagnostics of ISCCP (the International Satellite Cloud Climatology Project). Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -7697,7 +7761,7 @@ 1 81 sftlf - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. @@ -7718,7 +7782,7 @@ 1 sftgif - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. 
"where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. @@ -7998,14 +8062,14 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. "Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names also exist for high, medium and low cloud types. "Cloud area fraction is also called "cloud amount" and "cloud cover". 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be "model_level_number", but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). 
If the layers are model layers, the vertical coordinate can be "model_level_number", but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -8110,14 +8174,14 @@ 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Low type clouds are: Stratus, Stratocumulus, Cumulus, Cumulonimbus. X_type_cloud_area_fraction is generally determined on the basis of cloud type, though Numerical Weather Prediction (NWP) models often calculate them based on the vertical location of the cloud. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Low type clouds are: Stratus, Stratocumulus, Cumulus, Cumulonimbus. X_type_cloud_area_fraction is generally determined on the basis of cloud type, though Numerical Weather Prediction (NWP) models often calculate them based on the vertical location of the cloud. For the cloud area fraction between specified levels in the atmosphere, standard names including "cloud_area_fraction_in_atmosphere_layer" are used. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. Cloud area fraction is also called "cloud amount" and "cloud cover". m s-1 - Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. + Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. @@ -8131,7 +8195,7 @@ m s-1 - "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. + "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. 
"Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. @@ -8145,7 +8209,7 @@ m s-1 - Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. + Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. "Precipitation rate" means the depth or thickness of the layer formed by precipitation per unit time. @@ -8173,7 +8237,7 @@ m E143 - The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. + The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. Convective precipitation is that produced by the convection schemes in an atmosphere model. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. @@ -8201,7 +8265,7 @@ m - The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. + The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. @@ -8222,7 +8286,7 @@ m E142 - The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. + The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. @@ -8313,7 +8377,7 @@ kg m-3 - "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19'-hexanoyloxyfucoxanthin is C48H68O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/HEXAXXXX/2/. 
+ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19'-hexanoyloxyfucoxanthin is C48H68O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/HEXAXXXX/2/. @@ -8348,14 +8412,14 @@ kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for acetic_acid is CH3COOH. The IUPAC name for acetic acid is ethanoic acid. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for acetic_acid is CH3COOH. The IUPAC name for acetic acid is ethanoic acid. kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for aceto-nitrile is CH3CN. The IUPAC name for aceto-nitrile is ethanenitrile. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for aceto-nitrile is CH3CN. The IUPAC name for aceto-nitrile is ethanenitrile. @@ -8369,14 +8433,14 @@ kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Alkanes are saturated hydrocarbons, i.e. they do not contain any chemical double bonds. Alkanes contain only hydrogen and carbon combined in the general proportions C(n)H(2n+2); "alkanes" is the term used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual alkane species, e.g., methane and ethane. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Alkanes are saturated hydrocarbons, i.e. they do not contain any chemical double bonds. 
These hunks update the descriptions of a block of mass-concentration entries in the standard name table. Every entry below has canonical units of kg m-3 and opens with the shared wording: "Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'." For group names, the descriptions also note that the list of individual species included in a quantity having a group chemical standard name can vary between models and that, where possible, the data variable should be accompanied by a complete description of the species represented, for example by using a comment attribute. The species-specific wording of each updated entry is listed below. (A short sketch of how these conventions map onto a data variable follows this list.)

- alkanes: Alkanes contain only hydrogen and carbon combined in the general proportions C(n)H(2n+2); "alkanes" is the term used in standard names to describe the group of chemical species having this common structure that are represented within a given model. Standard names exist for some individual alkane species, e.g. methane and ethane.
- alkenes: Alkenes are unsaturated hydrocarbons, containing chemical double bonds between adjacent carbon atoms, with hydrogen and carbon combined in the general proportions C(n)H(2n); "alkenes" is the group term used in standard names. Standard names exist for some individual alkene species, e.g. ethene and propene.
- alpha_hexachlorocyclohexane: The chemical formula is C6H6Cl6.
- alpha_pinene: The chemical formula is C10H16. The IUPAC name is (1S,5S)-2,6,6-trimethylbicyclo[3.1.1]hept-2-ene.
- ammonia: The chemical formula is NH3.
- aromatic_compounds: Aromatic compounds in organic chemistry contain at least one benzene ring of six carbon atoms joined by alternating single and double covalent bonds; the simplest aromatic compound is benzene itself. "aromatic_compounds" is the group term used in standard names; standard names also exist for individual aromatic species, e.g. benzene and xylene.
- atomic bromine: The chemical symbol is Br.
- atomic chlorine: The chemical symbol is Cl.
- atomic nitrogen: The chemical symbol is N.
- benzene: The chemical formula is C6H6. Benzene is the simplest aromatic hydrocarbon; it has a ring structure of six carbon atoms joined by alternating single and double chemical bonds, with each carbon atom additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group as well as to individual species.
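Not part of the patch itself, but to make the construction concrete: a minimal sketch of a data variable following these conventions, assuming Iris is available. The standard name choice, the placeholder data and the comment text are illustrative.

```python
# Hypothetical sketch: a data variable following the mass_concentration_of_X_in_Y
# construction, with a "comment" attribute listing the species represented, as the
# descriptions above recommend for group names such as "alkanes".
import numpy as np
import iris.cube

concentration = iris.cube.Cube(
    np.zeros((3, 4), dtype=np.float32),  # placeholder data
    standard_name="mass_concentration_of_alkanes_in_air",  # assumed present in the CF table
    units="kg m-3",  # canonical units from the table
)
# The group name covers whichever alkane species the model represents,
# so spell them out for downstream users.
concentration.attributes["comment"] = (
    "Alkanes represented in this model: methane, ethane, propane, butane."
)
print(concentration)
```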
Further entries in the same block (canonical units kg m-3, shared wording as above):

- beta_pinene: The chemical formula is C10H16. The IUPAC name is (1S,5S)-6,6-dimethyl-2-methylenebicyclo[3.1.1]heptane.
- bromine chloride: The chemical formula is BrCl.
- bromine monoxide: The chemical formula is BrO.
- bromine nitrate: The chemical formula is BrONO2.
- butane: The chemical formula is C4H10. Butane is a member of the group of hydrocarbons known as alkanes; there are standard names for the alkane group as well as for some of the individual species.
- carbon dioxide: The chemical formula is CO2.
- carbon monoxide: The chemical formula is CO.
- carbon tetrachloride: The chemical formula is CCl4. The IUPAC name is tetrachloromethane.
- chlorine dioxide: The chemical formula is OClO.
- chlorine monoxide: The chemical formula is ClO.
- chlorine nitrate: The chemical formula is ClONO2.
- chlorophyll-a: This entry uses the wording "a chemical or biological species denoted by X". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally; all contain a chlorin ring (chemical formula C20H16N4), which gives the green pigment, and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Chlorophyll-a is the most commonly occurring form of natural chlorophyll; its chemical formula is C55H72O5N4Mg.
- chlorophylls (group entry): Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally; all contain a chlorin ring (chemical formula C20H16N4), which gives the green pigment, and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms.
- cobalt in sea floor sediment: This entry uses the wording "a chemical or biological species denoted by X". Cobalt means cobalt in all chemical forms, commonly referred to as "total cobalt". "Sea floor sediment" is sediment deposited at the sea bed.
- dichlorine peroxide: The chemical formula is Cl2O2.
- dimethyl sulfide: The chemical formula is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS.
- dinitrogen pentoxide: The chemical formula is N2O5.
- ethane: The chemical formula is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes; there are standard names for the alkane group as well as for some of the individual species.
- ethanol: The chemical formula is C2H5OH.
- ethene: The chemical formula is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes; there are standard names for the alkene group as well as for some of the individual species.
- ethyne: The chemical formula is HC2H. Ethyne is the IUPAC name for this species, which is also commonly known as acetylene.
- formaldehyde: The chemical formula is CH2O. The IUPAC name is methanal.
- formic acid: The chemical formula is HCOOH. The IUPAC name is methanoic acid.
- divalent mercury: "Divalent mercury" means all compounds in which the mercury has two binding sites to other ion(s) in a salt or to other atom(s) in a molecule.
- mercury: The chemical symbol is Hg.
- HCFC141b: The chemical formula is CH3CCl2F. The IUPAC name is 1,1-dichloro-1-fluoroethane.
- HCFC142b: The chemical formula is CH3CClF2. The IUPAC name is 1-chloro-1,1-difluoroethane.
- hexachlorobiphenyl: The chemical formula is C12H4Cl6. The structure of this species consists of two linked benzene rings, each of which is additionally bonded to three chlorine atoms.
- HOx: "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A; it means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A.
- hydrogen bromide: The chemical formula is HBr.
- hydrogen chloride: The chemical formula is HCl.
- hydrogen cyanide: The chemical formula is HCN.
- hydrogen peroxide: The chemical formula is H2O2.
- hypobromous acid: The chemical formula is HOBr.
- hypochlorous acid: The chemical formula is HOCl.
- inorganic nitrogen: This entry uses the wording "a chemical or biological species denoted by X". 'Inorganic nitrogen' describes a family of chemical species which, in an ocean model, usually includes nitrite, nitrate and ammonium, which act as nitrogen nutrients. 'Inorganic nitrogen' is the term used in standard names for all species belonging to the family that are represented within a given model.
- methane: The chemical formula is CH4. Methane is a member of the group of hydrocarbons known as alkanes; there are standard names for the alkane group as well as for some of the individual species.
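Purely illustrative: a small sketch of checking a candidate name from this block against Iris's bundled copy of the standard name table, assuming the generated iris.std_names.STD_NAMES mapping is available; the chosen name is an example.

```python
# Hypothetical check (not part of the patch): confirm that a candidate name exists
# in this Iris build's standard name table and report its canonical units, which
# should match the kg m-3 shown in the hunks above.
from iris.std_names import STD_NAMES

name = "mass_concentration_of_methane_in_air"
entry = STD_NAMES.get(name)
if entry is None:
    print(f"{name!r} is not in this Iris build's standard name table")
else:
    # In the Iris builds I am assuming, the values are small dicts holding the
    # canonical units; fall back to the raw value if that layout differs.
    units = entry.get("canonical_units") if isinstance(entry, dict) else entry
    print(f"{name!r} -> canonical units {units!r}")
```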
- methanol: The chemical formula is CH3OH.
- methyl bromide: The chemical formula is CH3Br. The IUPAC name is bromomethane.
- methyl chloride: The chemical formula is CH3Cl. The IUPAC name is chloromethane.
- methyl hydroperoxide: The chemical formula is CH3OOH.
- molecular hydrogen: The chemical formula is H2.
- nitric acid: The chemical formula is HNO3.
- nitrogen dioxide: The chemical formula is NO2.
- nitrogen monoxide: The chemical formula is NO.
- nitrous acid: The chemical formula is HNO2.
- nitrous oxide: The chemical formula is N2O.
- nox expressed as nitrogen: "Nox" means a combination of two radical species containing nitrogen and oxygen: NO + NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A; it means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. (A worked sketch of this construction follows this list.)
- noy expressed as nitrogen: "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2), chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, CH3COO2NO2). The 'expressed_as' construction applies as for nox.
- organic detritus expressed as carbon, and organic detritus expressed as nitrogen: The 'expressed_as' construction applies; organic detritus are particles of debris from decaying plants and animals.
- oxygenated hydrocarbons: "Oxygenated" means containing oxygen. "Hydrocarbon" means a compound containing hydrogen and carbon.
- ozone: The chemical formula is O3.
- peroxyacetyl nitrate: The chemical formula for peroxyacetyl nitrate, sometimes referred to as PAN, is CH3COO2NO2. The IUPAC name is nitroethaneperoxoate.
- peroxynitric acid: The chemical formula for peroxynitric acid, sometimes referred to as PNA, is HO2NO2.
- one further entry in this block carries no species-specific wording beyond the shared text.
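A worked sketch of the 'expressed_as' construction described in the nox entry above, using plain NumPy; the array names and values are made up, and only the molar-mass arithmetic is the point.

```python
# Hypothetical sketch of the A_expressed_as_B construction: combine NO and NO2
# mass concentrations (kg m-3) into "nox expressed as nitrogen" by counting only
# the nitrogen that each molecule contributes.
import numpy as np

M_N, M_O = 14.007, 15.999            # molar masses, g mol-1
M_NO, M_NO2 = M_N + M_O, M_N + 2 * M_O

no = np.array([1.0e-9, 2.0e-9])      # kg m-3 of NO (illustrative values)
no2 = np.array([3.0e-9, 4.0e-9])     # kg m-3 of NO2 (illustrative values)

# Each species contributes (mass of N per molecule) / (molecular mass) of its
# concentration to the "expressed as nitrogen" total.
nox_as_n = no * (M_N / M_NO) + no2 * (M_N / M_NO2)
print(nox_as_n)                      # kg m-3 of NOx expressed as nitrogen
```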
- pm10 ambient aerosol particles: This entry uses double-quoted wording: a chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth; the extent of hygroscopic growth depends on the relative humidity and the composition of the particles. "Pm10 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 10 micrometers. To specify the relative humidity and temperature at which the particle size applies, provide scalar coordinate variables with the standard names of, respectively, "relative_humidity" and "air_temperature". (A sketch of attaching those scalar coordinates follows below.)
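A minimal sketch of that scalar-coordinate guidance, assuming Iris; the long name, placeholder data and coordinate values are illustrative.

```python
# Hypothetical sketch: attach the relative_humidity and air_temperature scalar
# coordinates recommended above to a PM10 mass-concentration cube.
import numpy as np
import iris.cube
from iris.coords import AuxCoord

pm10 = iris.cube.Cube(
    np.zeros(5, dtype=np.float32),  # placeholder data
    long_name="mass concentration of pm10 ambient aerosol particles in air",
    units="kg m-3",
)
# Scalar coordinates recording the state at which the particle size applies.
pm10.add_aux_coord(AuxCoord(50.0, standard_name="relative_humidity", units="%"))
pm10.add_aux_coord(AuxCoord(293.15, standard_name="air_temperature", units="K"))
print(pm10)
```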
This hunk also adds a block of new entries for ion and acid species in PM10 dry aerosol particles. Each new entry has canonical units of kg m-3 and shares the wording: "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm10 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 10 micrometers. The species-specific wording of each new entry is:

- ammonium: The chemical formula for ammonium is NH4+.
- bromide: The chemical formula for the bromide anion is Br-.
- calcium: The chemical formula for the calcium dication is Ca(2+).
- chloride: The chemical formula for chloride is Cl-.
- magnesium: The chemical formula for the magnesium dication is Mg(2+).
- methanesulfonic acid: The chemical formula for methanesulfonic acid is CH3SO3H.
- nitrate: The chemical formula for the nitrate anion is NO3-.
- oxalate: The chemical formula for the oxalate dianion is C2O4(2-).
- potassium: The chemical formula for the potassium cation is K+.
- sodium: The chemical formula for the sodium cation is Na+.
- sulfate: The chemical formula for the sulfate anion is SO4(2-).

The block also shows an existing PM10 dry aerosol entry that carries only the shared wording. The remaining hunks update further mass-concentration descriptions (canonical units kg m-3, shared wording as earlier):

- propane: The chemical formula is C3H8. Propane is a member of the group of hydrocarbons known as alkanes; there are standard names for the alkane group as well as for some of the individual species.
- propene: The chemical formula is C3H6. Propene is a member of the group of hydrocarbons known as alkenes; there are standard names for the alkene group as well as for some of the individual species.
- radon: The chemical symbol is Rn.
@@ -9671,7 +9812,7 @@ kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. @@ -9699,21 +9840,21 @@ kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for sulfur dioxide is SO2. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for sulfur dioxide is SO2. kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Terpenes are hydrocarbons, that is, they contain only hydrogen and carbon combined in the general proportions (C5H8)n where n is an integer greater than on equal to one. The term "terpenes" is used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual terpene species, e.g., isoprene and limonene. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Terpenes are hydrocarbons, that is, they contain only hydrogen and carbon combined in the general proportions (C5H8)n where n is an integer greater than on equal to one. The term "terpenes" is used in standard names to describe the group of chemical species having this common structure that are represented within a given model. 
The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual terpene species, e.g., isoprene and limonene. @@ -9755,14 +9896,14 @@ kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. kg m-3 - Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. + Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. @@ -9972,49 +10113,49 @@ 1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for acetic_acid is CH3COOH. The IUPAC name for acetic acid is ethanoic acid. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for acetic_acid is CH3COOH. The IUPAC name for acetic acid is ethanoic acid. 1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for aceto-nitrile is CH3CN. 
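Each entry above pairs a description with the canonical units for the name; in a CF-netCDF file these surface as the standard_name and units attributes of a data variable. The following is a minimal sketch only, not part of the patch: the file name, variable name and values are invented, and the standard name is assumed to be a valid member of the mass-concentration family described here.

```python
# Minimal sketch of how a standard name and its canonical units from the table
# are recorded on a CF-netCDF data variable. Illustrative only; the standard
# name below is assumed, not taken verbatim from this diff.
import numpy as np
from netCDF4 import Dataset

with Dataset("mass_concentration_example.nc", "w") as nc:
    nc.createDimension("time", 3)
    so4 = nc.createVariable("so4_in_air", "f4", ("time",))
    so4.standard_name = "mass_concentration_of_sulfate_dry_aerosol_particles_in_air"
    so4.units = "kg m-3"  # canonical units from the corresponding table entry
    so4[:] = np.array([1.2e-9, 1.5e-9, 1.1e-9], dtype="f4")
```

CF requires the units attribute to be convertible to the canonical units rather than identical to them, so, for example, "ug m-3" would also be acceptable for this variable.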
[CF standard-name table: mass_fraction_of_X_in_Y entries (canonical units 1), hunks @@ -10035,42 +10176,42 @@ to @@ -11015,7 +11156,7 @@]

These hunks re-wrap existing descriptions without changing their visible wording. The entries covered are the mass fractions of: alkanes (saturated hydrocarbons, general formula C(n)H(2n+2)) and alkenes (unsaturated hydrocarbons, C(n)H(2n)), each noting that a group name covers whichever member species a given model represents and that individual names exist for, e.g., methane, ethane, ethene and propene; alpha-hexachlorocyclohexane (C6H6Cl6); alpha-pinene and beta-pinene (both C10H16, with their IUPAC names); ammonia (NH3); aromatic compounds (species containing at least one benzene ring), with benzene (C6H6) itself listed separately; atomic bromine (Br), atomic chlorine (Cl) and atomic nitrogen (N); and chlorophyll-a (the most commonly occurring form of natural chlorophyll). All carry the boilerplate "Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X)." The later hunks in this range touch the sediment grain-size mass-fraction entries ("Grain-size class distribution is based on the Udden-Wentworth scale") and the mass fraction of precipitation ("'Precipitation' in the earth's atmosphere means precipitation of water in all phases").
[CF standard-name table: area_fraction, depth-of-undersaturation and MODIS cloud-area-fraction entries, hunks @@ -11155,21 +11296,21 @@ to @@ -11470,14 +11632,14 @@]

The middle_type cloud area fraction description is extended: "area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest, evaluated as the area of interest divided by the grid cell area, "or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion". The surrounding text on middle type clouds (Altostratus, Altocumulus, Nimbostratus), on "cloud_area_fraction_in_atmosphere_layer" versus whole-column "cloud_area_fraction" names, and on the synonyms "cloud amount" and "cloud cover" is unchanged. The minimum-depth-of-undersaturation entries for aragonite and calcite (canonical units m; both polymorphs of calcium carbonate, CaCO3; the "minimum depth of undersaturation", sometimes called the "saturation horizon", is the shallowest depth at which a body of water is an undersaturated solution of the named solute) are re-wrapped without visible wording changes.

Hunk @@ -11459,6 +11600,27 @@, following the modified Fosberg Fire Weather Index (mFFWI) entry, adds three MODIS-simulator entries, each with canonical units of 1: a joint histogram of cloud area fraction, as seen from above, over the cloud-top pressure and cloud optical depth (tau) intervals given by its coordinates; a cloud area fraction for ice-topped clouds; and a cloud area fraction for liquid-topped clouds. Each description notes that the quantity is diagnosed from atmosphere model output by the MODIS simulator software so as to be comparable with observations from MODIS (Moderate Resolution Imaging Spectroradiometer), that a "satellite name_" prefix distinguishes simulator-derived fractions from the cloud area fractions diagnosed natively by the model, that cloud area fraction is also called "cloud amount" and "cloud cover", and repeats the cell_methods form of the "area fraction" definition above.
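The revised "area fraction" wording refers to cell_methods restricting a statistic to part of a grid cell. The sketch below is a hedged illustration only: the file, variable names and values are invented, and the standard name is simply a plausible CF name used to show the attribute in context.

```python
# Sketch of the cell_methods usage referenced above: a statistic evaluated only
# over the identified portion of each grid cell ("where sea_ice"), rather than
# over the whole cell. Illustrative only, not part of the patch.
from netCDF4 import Dataset

with Dataset("cell_methods_example.nc", "w") as nc:
    nc.createDimension("lat", 2)
    nc.createDimension("lon", 2)
    t_ice = nc.createVariable("surface_temperature_where_sea_ice", "f4", ("lat", "lon"))
    t_ice.standard_name = "surface_temperature"
    t_ice.units = "K"
    # Mean taken over the sea-ice-covered portion of each cell only.
    t_ice.cell_methods = "area: mean where sea_ice"
    t_ice[:, :] = 271.35
```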
[CF standard-name table: mole_concentration_of_X_in_Y entries (canonical units mol m-3), hunks @@ -11470,14 +11632,14 @@ to @@ -11995,21 +12157,21 @@]

These hunks re-wrap existing descriptions without changing their visible wording. The entries covered are the mole concentrations ("molarity") of: acetic acid (CH3COOH), aceto-nitrile (CH3CN), alpha-hexachlorocyclohexane (C6H6Cl6), alpha-pinene and beta-pinene (both C10H16), ammonia (NH3), aragonite and calcite expressed as carbon (both polymorphs of calcium carbonate, CaCO3, using the A_expressed_as_B construction), atomic bromine (Br), atomic chlorine (Cl), atomic nitrogen (N), benzene (C6H6), bromine chloride (BrCl), bromine monoxide (BrO), bromine nitrate (BrONO2), butane (C4H10), carbon dioxide (CO2), carbon monoxide (CO), chlorine dioxide (OClO), chlorine monoxide (ClO), chlorine nitrate (ClONO2), diatoms (single-celled phytoplankton with an external skeleton made of silica, via the expressed_as construction), dichlorine peroxide (Cl2O2), dimethyl sulfide ((CH3)2S, also known as DMS), dinitrogen pentoxide (N2O5), two "organic carbon" group entries (one referring to sea floor sediment and to water in all phases), and ethane (C2H6). All repeat the boilerplate "Mole concentration means number of moles per unit volume, also called 'molarity', and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y", together with the A_expressed_as_B and group-name notes where they apply. The final hunk continues with further mole_concentration entries beyond ethane.
The chemical formula for ethanol is C2H5OH. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ethanol is C2H5OH. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes.There are standard names for the alkene group as well as for some of the individual species. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes.There are standard names for the alkene group as well as for some of the individual species. @@ -12023,28 +12185,28 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for formic acid is HCOOH. The IUPAC name for formic acid is methanoic acid. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for formic acid is HCOOH. The IUPAC name for formic acid is methanoic acid. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. 
"Divalent mercury" means all compounds in which the mercury has two binding sites to other ion(s)in a salt or to other atom(s) in a molecule. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. "Divalent mercury" means all compounds in which the mercury has two binding sites to other ion(s)in a salt or to other atom(s) in a molecule. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical symbol for mercury is Hg. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical symbol for mercury is Hg. @@ -12093,14 +12255,14 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for HCFC141b is CH3CCl2F. The IUPAC name for HCFC141b is 1,1-dichloro-1-fluoroethane. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for HCFC141b is CH3CCl2F. The IUPAC name for HCFC141b is 1,1-dichloro-1-fluoroethane. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for HCFC142b is CH3CClF2. The IUPAC name for HCFC142b is 1-chloro-1,1-difluoroethane. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for HCFC142b is CH3CClF2. The IUPAC name for HCFC142b is 1-chloro-1,1-difluoroethane. @@ -12114,42 +12276,42 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hexachlorobiphenyl is C12H4Cl6. 
This structure of this species consists of two linked benzene rings, each of which is additionally bonded to three chlorine atoms. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hexachlorobiphenyl is C12H4Cl6. This structure of this species consists of two linked benzene rings, each of which is additionally bonded to three chlorine atoms. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen bromide is HBr. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen bromide is HBr. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen chloride is HCl. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen chloride is HCl. 
mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen cyanide is HCN. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen cyanide is HCN. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen peroxide is H2O2. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hydrogen peroxide is H2O2. @@ -12177,14 +12339,14 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hypobromous acid is HOBr. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hypobromous acid is HOBr. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hypochlorous acid is HOCl. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for hypochlorous acid is HOCl. @@ -12219,49 +12381,49 @@ mol m-3 - Mole concentration' means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where Xis a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. 
It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mesozooplankton are zooplankton ranging between 20 micrometers and 200 micrometers in size. + Mole concentration' means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where Xis a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Mesozooplankton are zooplankton ranging between 20 micrometers and 200 micrometers in size. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated in terms of B alone, neglecting all other chemical constituents of A. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated in terms of B alone, neglecting all other chemical constituents of A. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methanol is CH3OH. 
+ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methanol is CH3OH. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methyl bromide is CH3Br. The IUPAC name for methyl bromide is bromomethane. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methyl bromide is CH3Br. The IUPAC name for methyl bromide is bromomethane. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methyl chloride is CH3Cl. The IUPAC name for methyl chloride is chloromethane. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methyl chloride is CH3Cl. The IUPAC name for methyl chloride is chloromethane. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methyl hydroperoxide is CH3OOH. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for methyl hydroperoxide is CH3OOH. @@ -12275,14 +12437,14 @@ mol m-3 - Mole concentration' means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where Xis a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Microzooplankton are zooplankton of less than 20 micrometers in size. 
+ Mole concentration' means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where Xis a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Microzooplankton are zooplankton of less than 20 micrometers in size. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated in terms of B alone, neglecting all other chemical constituents of A. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated in terms of B alone, neglecting all other chemical constituents of A. @@ -12303,14 +12465,14 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for molecular hydrogen is H2. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for molecular hydrogen is H2. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for the nitrate anion is NO3-. The chemical formula for the nitrite anion is NO2-. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for the nitrate anion is NO3-. The chemical formula for the nitrite anion is NO2-. 
@@ -12331,7 +12493,7 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitric acid is HNO3. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitric acid is HNO3. @@ -12345,35 +12507,35 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for the nitrite anion is NO2-. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for the nitrite anion is NO2-. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrogen dioxide is NO2. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrogen dioxide is NO2. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrogen monoxide is NO. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrogen monoxide is NO. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrous acid is HNO2. 
+ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrous acid is HNO2. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrous oxide is N2O. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for nitrous oxide is N2O. @@ -12401,28 +12563,28 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'.The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Organic detritus are particles of debris from decaying plants and animals. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'.The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Organic detritus are particles of debris from decaying plants and animals. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Organic detritus are particles of debris from decaying plants and animals. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. 
The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Organic detritus are particles of debris from decaying plants and animals. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Organic detritus are particles of debris from decaying plants and animals. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. Organic detritus are particles of debris from decaying plants and animals. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ozone is O3. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ozone is O3. @@ -12548,14 +12710,14 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for peroxynitric acid, sometimes referred to as PNA, is HO2NO2. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for peroxynitric acid, sometimes referred to as PNA, is HO2NO2. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. 
A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. @@ -12639,21 +12801,21 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical symbol for radon is Rn. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical symbol for radon is Rn. @@ -12667,7 +12829,7 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. 
A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for sulfur dioxide is SO2. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for sulfur dioxide is SO2. @@ -12688,14 +12850,14 @@ mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. mol m-3 - Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. + Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. @@ -12779,7 +12941,7 @@ 1 - Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Alkanes are saturated hydrocarbons, i.e. they do not contain any chemical double bonds. Alkanes contain only hydrogen and carbon combined in the general proportions C(n)H(2n+2); "alkanes" is the term used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. 
Standard names exist for some individual alkane species, e.g., methane and ethane. + Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Alkanes are saturated hydrocarbons, i.e. they do not contain any chemical double bonds. Alkanes contain only hydrogen and carbon combined in the general proportions C(n)H(2n+2); "alkanes" is the term used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual alkane species, e.g., methane and ethane. @@ -13367,7 +13529,7 @@ 1 - Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula of hydrogen sulfide is H2S. + Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula of hydrogen sulfide is H2S. @@ -14120,6 +14282,20 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 100 hour fuel moisture (FM100) represents the modeled moisture content of dead fuels in the 1 to 3 inch diameter class. It can also be used as a very rough estimate of the average moisture content of the forest floor from three-fourths inch to 4 inches below the surface. The 100-hour timelag fuel moisture is a function of length of day (as influenced by latitude and calendar date), maximum and minimum temperature and relative humidity, and precipitation duration in the previous 24 hours. It is a component in the US National Fire Danger Rating System. The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. It was designed for use in the continental United States, and all its components are relative, not absolute. + + % + + + 10 hour fuel moisture (FM10) represents the modeled moisture content of dead fuels consisting of roundwood in the size range of one quarter to 1 inch in diameter and very roughly, the layer of litter extending from just below the surface to three-quarters of inch below the surface. The 10-hour timelag fuel moisture is a function of length of day (as influenced by latitude and calendar date), daily downwelling shortwave radiation, daily maximum temperature and minimum relative humidity, and daily precipitation values. It is a component in the US National Fire Danger Rating System (cf. https://www.bia.gov/sites/default/files/dup/assets/public/pdf/idc-020513.pdf). The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. 
It was designed for use in the continental United States, and all its components are relative, not absolute. + + + + % + + + 1 hour fuel moisture (FM1) represents the modeled moisture content of dead fuels consisting of herbaceous plants or roundwood less than one-quarter inch in diameter. It also includes the uppermost layer of litter on the forest floor. The 1-hour timelag fuel moisture is a function of length of day (as influenced by latitude and calendar date), daily downwelling shortwave radiation, daily maximum temperature and minimum relative humidity, and daily precipitation values. It is a component in the US National Fire Danger Rating System (cf. https://www.bia.gov/sites/default/files/dup/assets/public/pdf/idc-020513.pdf). The US National Fire Danger Rating System comprises several numeric indexes that rate the potential over a large area for wildland fires to ignite, spread, and require action to suppress or manage. It was designed for use in the continental United States, and all its components are relative, not absolute . + + 1 @@ -14481,7 +14657,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m - "Northward" indicates a vector component which is positive when directed northward (negative southward). "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. A northward displacement is the distance calculated from the change in a moving object's latitude between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "Northward" indicates a vector component which is positive when directed northward (negative southward). "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. A northward displacement is the distance calculated from the change in a moving object's latitude between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -14733,7 +14909,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 
1 - The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". + The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The abbreviation "lwe" means liquid water equivalent. A variable whose standard name has the form number_of_days_with_X_below|above_threshold is a count of the number of days on which the condition X_below|above_threshold is satisfied. It must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_methods entry for within days which describes the processing of quantity X before the threshold is applied. A number_of_days is an extensive quantity in time, and the cell_methods entry for over days should be "sum". @@ -14820,6 +14996,13 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. See Appendix D of the CF convention for information about parametric vertical coordinates. + + m + + + Ocean dynamic sea level is the contribution to sea surface height variability made by processes other than astronomic forcing of the ocean and shallow water resonance of tidal components, or variations in air pressure. Sea surface height is a time-varying quantity. + + W @@ -15275,18 +15458,11 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. - - 1 - - - "X_volume_fraction" means the fraction of volume occupied by X. It is evaluated as the volume of interest divided by the grid cell volume. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A data variable with standard name ocean_volume_fraction is used to store the fraction of a grid cell underlying sea-water, for example, where part of the grid cell is occupied by land or to record ocean volume on a model's native grid following a regridding operation. - - m3 s-1 - Transport "across_line" means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. + Transport "across_line" means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. @@ -15398,7 +15574,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. 
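The number_of_days_with_X_below|above_threshold description above is really a metadata recipe: a coordinate carrying the standard name of X to supply the threshold, a climatological time variable, a cell_methods entry for "within days", and a "sum over days" entry for the count. A rough sketch of that recipe, written with the netCDF4 Python package (the file name, variable names and data values are invented for illustration, and the quantity chosen is number_of_days_with_air_temperature_below_threshold):

from netCDF4 import Dataset

with Dataset("ndays_example.nc", "w") as nc:
    nc.createDimension("time", 1)
    nc.createDimension("nv", 2)

    # Climatological time variable with its climatology bounds.
    time = nc.createVariable("time", "f8", ("time",))
    time.units = "days since 2000-01-01"
    time.climatology = "climatology_bounds"
    clim = nc.createVariable("climatology_bounds", "f8", ("time", "nv"))
    time[:] = [182.0]
    clim[:] = [[0.0, 365.0]]

    # Scalar coordinate supplying the threshold, with the standard name of X.
    thresh = nc.createVariable("temperature_threshold", "f4", ())
    thresh.standard_name = "air_temperature"
    thresh.units = "K"
    thresh.assignValue(273.15)

    # The count itself: X processed within days, then the count summed over days.
    ndays = nc.createVariable("ndays", "f4", ("time",))
    ndays.standard_name = "number_of_days_with_air_temperature_below_threshold"
    ndays.units = "1"
    ndays.coordinates = "temperature_threshold"
    ndays.cell_methods = "time: minimum within days time: sum over days"
    ndays[:] = [42.0]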
Permafrost is soil or rock that has remained at a temperature at or below zero degrees Celsius throughout the seasonal cycle for two or more years. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Permafrost is soil or rock that has remained at a temperature at or below zero degrees Celsius throughout the seasonal cycle for two or more years. @@ -15461,7 +15637,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. + Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. @@ -15783,7 +15959,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. degree - Platform zenith angle is the the angle between the line of sight to the platform and the local zenith at the observation target. This angle is measured starting from directly overhead and its range is from zero (directly overhead the observation target) to 180 degrees (directly below the observation target). Local zenith is a line perpendicular to the Earth's surface at a given location. "Observation target" means a location on the Earth defined by the sensor performing the observations. A standard name also exists for sensor_zenith_angle. For some viewing geometries the sensor and the platform cannot be assumed to be close enough to neglect the difference in calculated zenith angle. A "platform" is a structure or vehicle that serves as a base for mounting sensors. Platforms include, but are not limited to, satellites, aeroplanes, ships, buoys, instruments, ground stations, and masts. + Platform zenith angle is the the angle between the line of sight to the platform and the local zenith at the observation target. This angle is measured starting from directly overhead and its range is from zero (directly overhead the observation target) to 180 degrees (directly below the observation target). Local zenith is a line perpendicular to the Earth's surface at a given location. "Observation target" means a location on the Earth defined by the sensor performing the observations. A standard name also exists for sensor_zenith_angle. For some viewing geometries the sensor and the platform cannot be assumed to be close enough to neglect the difference in calculated zenith angle. A "platform" is a structure or vehicle that serves as a base for mounting sensors. Platforms include, but are not limited to, satellites, aeroplanes, ships, buoys, instruments, ground stations, and masts. @@ -15811,49 +15987,49 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 61 - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. + "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. 
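The revised "area fraction" wording above changes which denominator is used when cell_methods restricts a quantity to a portion of the grid cell. A minimal sketch of the two conventions; all numbers are illustrative assumptions, not real data:

# Illustrative grid-cell quantities in m2 (assumed values).
cell_area = 1.0e8         # total horizontal area of the grid cell
sea_ice_area = 4.0e7      # portion of the cell selected by cell_methods "where sea_ice"
area_of_interest = 1.0e7  # e.g. the area of that sea ice covered by surface snow

# Original wording: area of interest divided by the whole grid-cell area.
fraction_of_cell = area_of_interest / cell_area        # 0.1

# Revised wording: with a "where sea_ice" restriction, divide by the identified portion instead.
fraction_of_portion = area_of_interest / sea_ice_area  # 0.25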
kg m-2 s-1 59 pr - "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. kg m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The chemical formula for water is H2O. "O" means the element "oxygen" and "17O" is the stable isotope "oxygen-17". + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The chemical formula for water is H2O. "O" means the element "oxygen" and "17O" is the stable isotope "oxygen-17". kg m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The chemical formula for water is H2O. "O" means the element "oxygen" and "18O" is the stable isotope "oxygen-18". + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The chemical formula for water is H2O. "O" means the element "oxygen" and "18O" is the stable isotope "oxygen-18". kg m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The chemical formula for water is H2O. "H" means the element "hydrogen" and "2H" is the stable isotope "hydrogen-2", usually called "deuterium". The construction "X_containing_single_Y" means the standard name refers to only that part of X composed of molecules containing a single atom of isotope Y. + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The chemical formula for water is H2O. "H" means the element "hydrogen" and "2H" is the stable isotope "hydrogen-2", usually called "deuterium". The construction "X_containing_single_Y" means the standard name refers to only that part of X composed of molecules containing a single atom of isotope Y. kg m-2 s-1 prveg - "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. Previously, the qualifier where_type was used to specify that the quantity applies only to the part of the grid box of the named type. Names containing the where_type qualifier are deprecated and newly created data should use the cell_methods attribute to indicate the horizontal area to which the quantity applies. "Canopy" means the vegetative covering over a surface. 
The canopy is often considered to be the outer surfaces of the vegetation. Plant height and the distribution, orientation and shape of plant leaves within a canopy influence the atmospheric environment and many plant processes within the canopy. Reference: AMS Glossary http://glossary.ametsoc.org/wiki/Canopy. + "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. Unless indicated in the cell_methods attribute, a quantity is assumed to apply to the whole area of each horizontal grid box. Previously, the qualifier where_type was used to specify that the quantity applies only to the part of the grid box of the named type. Names containing the where_type qualifier are deprecated and newly created data should use the cell_methods attribute to indicate the horizontal area to which the quantity applies. "Canopy" means the vegetative covering over a surface. The canopy is often considered to be the outer surfaces of the vegetation. Plant height and the distribution, orientation and shape of plant leaves within a canopy influence the atmospheric environment and many plant processes within the canopy. Reference: AMS Glossary http://glossary.ametsoc.org/wiki/Canopy. - A variable with the standard name predominant_precipitation_type_at_surface contains strings which indicate the character of the predominant precipitating hydrometeor at a location or grid cell. These strings have not yet been standardised. Alternatively, the data variable may contain integers which can be translated to strings using flag_values and flag_meanings attributes. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The surface called "surface" means the lower boundary of the atmosphere. + A variable with the standard name predominant_precipitation_type_at_surface contains strings which indicate the character of the predominant precipitating hydrometeor at a location or grid cell. These strings have not yet been standardised. Alternatively, the data variable may contain integers which can be translated to strings using flag_values and flag_meanings attributes. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The surface called "surface" means the lower boundary of the atmosphere. @@ -15881,7 +16057,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m s-1 - "product_of_X_and_Y" means X*Y. A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. 
Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "product_of_X_and_Y" means X*Y. A velocity is a vector quantity. "Eastward" indicates a vector component which is positive when directed eastward (negative westward). Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. @@ -15958,7 +16134,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m s-1 - "product_of_X_and_Y" means X*Y. A velocity is a vector quantity. "Northward" indicates a vector component which is positive when directed northward (negative southward). Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. 
However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "product_of_X_and_Y" means X*Y. A velocity is a vector quantity. "Northward" indicates a vector component which is positive when directed northward (negative southward). Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. 
The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. @@ -16017,6 +16193,13 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. "product_of_X_and_Y" means X*Y. "specific" means per unit mass. A velocity is a vector quantity. "Upward" indicates a vector component which is positive when directed upward (negative downward). Specific humidity is the mass fraction of water vapor in (moist) air. Upward air velocity is the vertical component of the 3D air velocity vector. + + Pa m s-1 + + + The product of windspeed and vapor pressure deficit is referred to as the Hot-Dry-Windy Index (HDW) for interpreting fire weather. It is a fire weather index that indicates the influence that the atmosphere has on a fire through wind, heat, and moisture (cf. https://www.bia.gov/sites/default/files/dup/assets/public/pdf/idc-020513.pdf). The units of HDW do not have an established physical significance for fire processes. As a fire weather index, it should be expressed in units of hPa m s-1. + + radian @@ -18772,7 +18955,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W - Power of a radio wave, that was transmitted by an instrument and propagates in the air where it's scattered by the air due to which its properties change, and it is received again by an instrument. The "instrument" (examples are radar and lidar) is the device used to make the observation. The "scatterers" are what causes the transmitted signal to be returned to the instrument (examples are aerosols, hydrometeors and refractive index irregularities in the air). A standard name referring to the received power of the signal at the instrument. + Power of a radio wave, that was transmitted by an instrument and propagates in the air where it's scattered by the air due to which its properties change, and it is received again by an instrument. The "instrument" (examples are radar and lidar) is the device used to make the observation. The "scatterers" are what causes the transmitted signal to be returned to the instrument (examples are aerosols, hydrometeors and refractive index irregularities in the air). A standard name referring to the received power of the signal at the instrument. + + + + W + + + The "instrument" (examples are radar and lidar) is the device used to make the observation. The "scatterers" are what causes the transmitted signal to be returned to the instrument (examples are aerosols, hydrometeors and refractive index irregularities), of whatever kind the instrument detects. A standard name referring to the received power of the signal at the instrument. @@ -18793,7 +18983,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. mol/mol - This ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. + This ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. Mole fraction is used in the construction mole_fraction_of_X_in_Y, where X is a material constituent of Y. @@ -18968,7 +19158,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area.
It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. @@ -19031,7 +19221,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - The albedo of sea ice. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + The albedo of sea ice. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -19059,14 +19249,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 91 sic - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. m2 s-1 - Transport "across_line" means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + Transport "across_line" means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. 
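The new Hot-Dry-Windy Index entry above describes HDW as the product of wind speed and vapour pressure deficit, expressed in hPa m s-1. A hedged sketch of how such a product might be computed; the Magnus-type saturation vapour pressure formula and all input values are assumptions for illustration only, not part of the proposed entry:

import math

def saturation_vapour_pressure_hpa(t_celsius):
    # Magnus-type approximation for e_s(T); any saturation vapour pressure formula could be used.
    return 6.112 * math.exp(17.62 * t_celsius / (243.12 + t_celsius))

def hot_dry_windy_index(wind_speed_m_s, t_celsius, relative_humidity_fraction):
    # Vapour pressure deficit in hPa, then HDW = wind speed * VPD (hPa m s-1).
    vpd_hpa = saturation_vapour_pressure_hpa(t_celsius) * (1.0 - relative_humidity_fraction)
    return wind_speed_m_s * vpd_hpa

print(hot_dry_windy_index(10.0, 35.0, 0.2))  # roughly 450 hPa m s-1 for these assumed inputs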
@@ -19150,7 +19340,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1e-3 - Sea ice salinity is the salt content of sea ice, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + Sea ice salinity is the salt content of sea ice, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -19192,7 +19382,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg s-1 - Transport across_line means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + Transport across_line means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -19206,14 +19396,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m - "x" indicates a vector component along the grid x-axis, positive with increasing x. "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. An x displacement is calculated from the difference in the moving object's grid x coordinate between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "x" indicates a vector component along the grid x-axis, positive with increasing x. 
"Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. An x displacement is calculated from the difference in the moving object's grid x coordinate between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. N m-2 - "x" indicates a vector component along the grid x-axis, positive with increasing x. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In meteorology and oceanography, the Coriolis effect per unit mass arises solely from the earth's rotation and acts as a deflecting force, normal to the velocity, to the right of the motion in the Northern Hemisphere and to the left in the Southern Hemisphere. Reference: American Meteorological Society Glossary http://glossary.ametsoc.org/wiki/Coriolis_force. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "x" indicates a vector component along the grid x-axis, positive with increasing x. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In meteorology and oceanography, the Coriolis effect per unit mass arises solely from the earth's rotation and acts as a deflecting force, normal to the velocity, to the right of the motion in the Northern Hemisphere and to the left in the Southern Hemisphere. Reference: American Meteorological Society Glossary http://glossary.ametsoc.org/wiki/Coriolis_force. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -19248,14 +19438,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m - "y" indicates a vector component along the grid y-axis, positive with increasing y. "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. 
A y displacement is calculated from the difference in the moving object's grid y coordinate between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "y" indicates a vector component along the grid y-axis, positive with increasing y. "Displacement" means the change in geospatial position of an object that has moved over time. If possible, the time interval over which the motion took place should be specified using a bounds variable for the time coordinate variable. A displacement can be represented as a vector. Such a vector should however not be interpreted as describing a rectilinear, constant speed motion but merely as an indication that the start point of the vector is found at the tip of the vector after the time interval associated with the displacement variable. A displacement does not prescribe a trajectory. Sea ice displacement can be defined as a two-dimensional vector, with no vertical component. A y displacement is calculated from the difference in the moving object's grid y coordinate between the start and end of the time interval associated with the displacement variable. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. N m-2 - "y" indicates a vector component along the grid y-axis, positive with increasing y. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In meteorology and oceanography, the Coriolis effect per unit mass arises solely from the earth's rotation and acts as a deflecting force, normal to the velocity, to the right of the motion in the Northern Hemisphere and to the left in the Southern Hemisphere. Reference: American Meteorological Society Glossary http://glossary.ametsoc.org/wiki/Coriolis_force. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + "y" indicates a vector component along the grid y-axis, positive with increasing y. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In meteorology and oceanography, the Coriolis effect per unit mass arises solely from the earth's rotation and acts as a deflecting force, normal to the velocity, to the right of the motion in the Northern Hemisphere and to the left in the Southern Hemisphere. Reference: American Meteorological Society Glossary http://glossary.ametsoc.org/wiki/Coriolis_force. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. @@ -19374,7 +19564,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m - The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Tides are a significant contributor to the observed sea surface height; the pole tide occurs due to variations in the earth's rotation. 
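Several of the displacement descriptions above recommend recording the time interval of the motion with a bounds variable on the time coordinate. A minimal netCDF4-python sketch of that encoding; the file name, variable names, standard_name string and values are assumptions for illustration:

from netCDF4 import Dataset

ds = Dataset("sea_ice_displacement_example.nc", "w")
ds.createDimension("time", 1)
ds.createDimension("nv", 2)

# Time coordinate with an associated bounds variable describing the motion interval.
time = ds.createVariable("time", "f8", ("time",))
time.units = "days since 2000-01-01"
time.bounds = "time_bnds"
time[:] = [1.0]

time_bnds = ds.createVariable("time_bnds", "f8", ("time", "nv"))
time_bnds[:] = [[0.0, 2.0]]  # start and end of the interval over which the displacement occurred

# The displacement itself; the standard_name string is assumed here.
dx = ds.createVariable("x_displacement", "f4", ("time",))
dx.standard_name = "sea_ice_x_displacement"
dx.units = "m"
dx[:] = [1500.0]

ds.close()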
+ The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Tides are a significant contributor to the observed sea surface height; the pole tide occurs due to variations in the earth's rotation. @@ -19479,7 +19669,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1e-3 - Sea surface salinity is the salt content of sea water close to the sea surface, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. Sea surface salinity is often abbreviated as "SSS". For the salinity of sea water at a particular depth or layer, a data variable of "sea_water_salinity" or one of the more precisely defined salinities should be used with a vertical coordinate axis. There are standard names for the precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + Sea surface salinity is the salt content of sea water close to the sea surface, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. Sea surface salinity is often abbreviated as "SSS". For the salinity of sea water at a particular depth or layer, a data variable of "sea_water_salinity" or one of the more precisely defined salinities should be used with a vertical coordinate axis. 
There are standard names for the precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. @@ -20088,7 +20278,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. mol m-3 - 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). + 'sea_water_alkalinity_expressed_as_mole_equivalent' is the total alkalinity equivalent concentration (including carbonate, nitrogen, silicate, and borate components). @@ -20137,7 +20327,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. S m-1 - The electrical conductivity of sea water in a sample measured at a defined reference temperature. The reference temperature should be recorded in a scalar coordinate variable, or a coordinate variable with a single dimension of size one, and the standard name of temperature_of_analysis_of_sea_water. This quantity is sometimes called 'specific conductivity' when the reference temperature 25 degrees Celsius. + The electrical conductivity of sea water in a sample measured at a defined reference temperature. The reference temperature should be recorded in a scalar coordinate variable, or a coordinate variable with a single dimension of size one, and the standard name of temperature_of_analysis_of_sea_water. This quantity is sometimes called 'specific conductivity' when the reference temperature 25 degrees Celsius. @@ -20193,7 +20383,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - 'sea_water_pH_reported_on_total_scale' is the measure of acidity of seawater, defined as the negative logarithm of the concentration of dissolved hydrogen ions plus bisulfate ions in a sea water medium; it can be measured or calculated; when measured the scale is defined according to a series of buffers prepared in artificial seawater containing bisulfate. The quantity may be written as pH(total) = -log([H+](free) + [HSO4-]). 
+ 'sea_water_pH_reported_on_total_scale' is the measure of acidity of seawater, defined as the negative logarithm of the concentration of dissolved hydrogen ions plus bisulfate ions in a sea water medium; it can be measured or calculated; when measured the scale is defined according to a series of buffers prepared in artificial seawater containing bisulfate. The quantity may be written as pH(total) = -log([H+](free) + [HSO4-]). @@ -20305,14 +20495,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1e-3 88 so - Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. 
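The conductivity description above asks for the reference temperature to be recorded in a scalar coordinate variable with the standard name temperature_of_analysis_of_sea_water. A minimal netCDF4-python sketch of one way to attach such a scalar coordinate; the variable names, the data variable's standard_name and the values are illustrative assumptions:

from netCDF4 import Dataset

ds = Dataset("conductivity_example.nc", "w")
ds.createDimension("obs", 3)

cond = ds.createVariable("conductivity", "f4", ("obs",))
cond.standard_name = "sea_water_electrical_conductivity"  # chosen for illustration
cond.units = "S m-1"
cond.coordinates = "analysis_temperature"
cond[:] = [4.8, 4.9, 5.0]

# Scalar coordinate variable (no dimensions) holding the reference temperature.
t_ref = ds.createVariable("analysis_temperature", "f4", ())
t_ref.standard_name = "temperature_of_analysis_of_sea_water"
t_ref.units = "degree_Celsius"
t_ref.assignValue(25.0)

ds.close()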
Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. 1e-3 - The salinity at the sea floor is that adjacent to the ocean bottom, which would be the deepest grid cell in an ocean model and within the benthic boundary layer for measurements. Sea water salinity is the salt concentration of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. Practical salinity units are dimensionless. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + The salinity at the sea floor is that adjacent to the ocean bottom, which would be the deepest grid cell in an ocean model and within the benthic boundary layer for measurements. Sea water salinity is the salt concentration of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. 
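The salinity descriptions above repeatedly quote the conversion relations between the historical salinity scales. A minimal sketch of those relations exactly as stated; the example input value is an assumption:

def practical_salinity_from_knudsen(s_k):
    # S_P = (S_K - 0.03) * (1.80655 / 1.805), as quoted in the descriptions above.
    return (s_k - 0.03) * (1.80655 / 1.805)

def practical_salinity_from_cox(s_c):
    # S_P = S_C; accuracy depends on whether chlorinity or conductivity gave S_C.
    return s_c

print(practical_salinity_from_knudsen(35.03))  # approximately 35.03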
There are standard names for the more precisely defined salinity quantities sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. Practical salinity units are dimensionless. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. @@ -20417,7 +20607,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg s-1 - Transport across_line means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. + Transport across_line means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. @@ -20469,6 +20659,13 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. The quantity with standard name "sea_water_volume" is the total volume of liquid seawater in the global oceans, including enclosed seas. + + 1 + + + "X_volume_fraction" means the fraction of volume occupied by X. It is evaluated as the volume of interest divided by the grid cell volume. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. A data variable with standard name sea_water_volume_fraction is used to store the fraction of a grid cell underlying sea-water, for example, where part of the grid cell is occupied by land or to record ocean volume on a model's native grid following a regridding operation. + + m s-1 @@ -20515,21 +20712,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - sensor_band_central_radiation_frequency is the central frequency of a sensor's band, calculated as the first moment of the band's normalized spectral response function. + sensor_band_central_radiation_frequency is the central frequency of a sensor's band, calculated as the first moment of the band's normalized spectral response function. m - sensor_band_central_radiation_wavelength is the central wavelength of a sensor's band, calculated as the first moment of the band's normalized spectral response function. 
+ sensor_band_central_radiation_wavelength is the central wavelength of a sensor's band, calculated as the first moment of the band's normalized spectral response function. m-1 - sensor_band_central_radiation_wavenumber is the central wavenumber of a sensor's band, calculated as the first moment of the band's normalized spectral response function. + sensor_band_central_radiation_wavenumber is the central wavenumber of a sensor's band, calculated as the first moment of the band's normalized spectral response function. @@ -20550,7 +20747,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. degree - sensor_zenith_angle is the angle between the line of sight to the sensor and the local zenith at the observation target. This angle is measured starting from directly overhead and its range is from zero (directly overhead the observation target) to 180 degrees (directly below the observation target). Local zenith is a line perpendicular to the Earth's surface at a given location. "Observation target" means a location on the Earth defined by the sensor performing the observations. A standard name also exists for platform_zenith_angle, where "platform" refers to the vehicle from which observations are made e.g. aeroplane, ship, or satellite. For some viewing geometries the sensor and the platform cannot be assumed to be close enough to neglect the difference in calculated zenith angle. + sensor_zenith_angle is the angle between the line of sight to the sensor and the local zenith at the observation target. This angle is measured starting from directly overhead and its range is from zero (directly overhead the observation target) to 180 degrees (directly below the observation target). Local zenith is a line perpendicular to the Earth's surface at a given location. "Observation target" means a location on the Earth defined by the sensor performing the observations. A standard name also exists for platform_zenith_angle, where "platform" refers to the vehicle from which observations are made e.g. aeroplane, ship, or satellite. For some viewing geometries the sensor and the platform cannot be assumed to be close enough to neglect the difference in calculated zenith angle. @@ -20578,7 +20775,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - Convective precipitation is that produced by the convection schemes in an atmosphere model. Some atmosphere models differentiate between shallow and deep convection. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + Convective precipitation is that produced by the convection schemes in an atmosphere model. Some atmosphere models differentiate between shallow and deep convection. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. @@ -20676,28 +20873,28 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. mol m-2 s-1 - The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
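The sensor_band_central_radiation_* descriptions above define the band centre as the first moment of the normalized spectral response function. A small sketch of that calculation for an assumed Gaussian response; the response shape and wavelength grid are illustrative only:

import numpy as np

# Assumed spectral response function R(wavelength) sampled on a uniform wavelength grid in metres.
wavelength = np.linspace(0.55e-6, 0.75e-6, 201)
response = np.exp(-0.5 * ((wavelength - 0.65e-6) / 0.02e-6) ** 2)

# First moment of the normalized response (discrete approximation on the uniform grid).
central_wavelength = np.sum(wavelength * response) / np.sum(response)
print(central_wavelength)  # close to 0.65e-6 m for this symmetric response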
In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Aragonite is a mineral that is a polymorph of calcium carbonate. The chemical formula of aragonite is CaCO3. Standard names also exist for calcite, another polymorph of calcium carbonate. + The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Aragonite is a mineral that is a polymorph of calcium carbonate. The chemical formula of aragonite is CaCO3. Standard names also exist for calcite, another polymorph of calcium carbonate. mol m-2 s-1 - The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Calcite is a mineral that is a polymorph of calcium carbonate. The chemical formula of calcite is CaCO3. Standard names also exist for aragonite, another polymorph of calcium carbonate. + The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. Calcite is a mineral that is a polymorph of calcium carbonate. The chemical formula of calcite is CaCO3. Standard names also exist for aragonite, another polymorph of calcium carbonate. mol m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. 
mol m-2 s-1 - The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. + The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. @@ -20711,14 +20908,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. mol m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. mol m-2 s-1 - In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 'Sinking' is the gravitational settling of particulate matter suspended in a liquid. A sinking flux is positive downwards and is calculated relative to the movement of the surrounding fluid. @@ -20732,7 +20929,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Snow "viewable from above" refers to the snow on objects or the ground as viewed from above, which excludes, for example, falling snow flakes and snow obscured by a canopy, vegetative cover, or other features resting on the surface. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. 
It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Snow "viewable from above" refers to the snow on objects or the ground as viewed from above, which excludes, for example, falling snow flakes and snow obscured by a canopy, vegetative cover, or other features resting on the surface. @@ -20760,14 +20957,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg s-1 - Transport "across_line" means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice dynamics" refers to advection of sea ice. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + Transport "across_line" means that which crosses a particular line on the Earth's surface; formally this means the integral along the line of the normal component of the transport. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice dynamics" refers to advection of sea ice. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. 1 - Soil albedo is the albedo of the soil surface assuming no snow. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. + Soil albedo is the albedo of the soil surface assuming no snow. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. @@ -20788,7 +20985,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m s-1 - Hydraulic conductivity is the constant k in Darcy's Law q=-k grad h for fluid flow q (volume transport per unit area i.e. velocity) through a porous medium, where h is the hydraulic head (pressure expressed as an equivalent depth of water). + Hydraulic conductivity is the constant k in Darcy's Law q=-k grad h for fluid flow q (volume transport per unit area i.e. velocity) through a porous medium, where h is the hydraulic head (pressure expressed as an equivalent depth of water). @@ -20921,7 +21118,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1e-3 - The quantity with standard name soil_water_salinity is the salt content of soil water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. + The quantity with standard name soil_water_salinity is the salt content of soil water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. 
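The hydraulic-conductivity description above quotes Darcy's Law, q = -k grad h. As a minimal illustrative sketch (not taken from the table or from Iris; the head profile, spacing and conductivity value below are hypothetical), the relation can be evaluated for a one-dimensional hydraulic-head profile with NumPy:

    import numpy as np

    def darcy_flux(k, head, spacing):
        """Darcy's Law, q = -k * dh/dz, for a 1-D hydraulic head profile.

        k is the hydraulic conductivity (m s-1), head is the hydraulic head (m)
        sampled at regular intervals, and spacing is the sample spacing (m).
        """
        return -k * np.gradient(head, spacing)

    # Hypothetical example: head decreasing along the profile at 0.1 m spacing.
    q = darcy_flux(k=1.0e-5, head=np.array([1.00, 0.80, 0.55, 0.35]), spacing=0.1)
    print(q)  # Darcy flux q (volume transport per unit area, m s-1)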
@@ -21145,7 +21342,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. Hz - The quantity with standard name spectral_width_of_radio_wave_in_air_scattered_by_air is the frequency width of the signal received by an instrument such as a radar or lidar. The signal returned to the instrument is the sum of all scattering from a given volume of air regardless of mechanism (examples are scattering by aerosols, hydrometeors and refractive index irregularities, or whatever else the instrument detects). + Frequency width of a radio wave that is transmitted by an instrument, propagates through the air where scattering by the air alters its properties, and is received by an instrument. The "instrument" (examples are radar and lidar) is the device used to make the observation. The "scatterers" are what causes the transmitted signal to be returned to the instrument (examples are aerosols, hydrometeors and refractive index irregularities in the air). This standard name refers to the frequency spectrum width of the signal received at the instrument. + + + + Hz + + + The "instrument" (examples are radar and lidar) is the device used to make the observation. The "scatterers" are whatever the instrument detects that causes the transmitted signal to be returned to the instrument (examples are aerosols, hydrometeors and refractive index irregularities). This standard name refers to the frequency width of the received signal. @@ -21180,14 +21384,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. day - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied.
A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". day - "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases.The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". + "Amount" means mass per unit area. "Precipitation" in the earth's atmosphere means precipitation of water in all phases.The construction lwe_thickness_of_X_amount or _content means the vertical extent of a layer of liquid water having the same mass per unit area. The abbreviation "lwe" means liquid water equivalent. A spell is the number of consecutive days on which the condition X_below|above_threshold is satisfied. A variable whose standard name has the form spell_length_of_days_with_X_below|above_threshold must have a coordinate variable or scalar coordinate variable with the standard name of X to supply the threshold(s). It must have a climatological time variable, and a cell_method entry for within days which describes the processing of quantity X before the threshold is applied. A spell_length_of_days is an intensive quantity in time, and the cell_methods entry for over days can be any of the methods listed in Appendix E appropriate for intensive quantities e.g. "maximum", "minimum" or "mean". @@ -21264,7 +21468,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1e-6 - The phrase "square_of_X" means X*X. Sea surface salinity is the salt concentration of sea water close to the sea surface, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. Sea surface salinity is often abbreviated as "SSS". For the salinity of sea water at a particular depth or layer, a data variable of "sea_water_salinity" or one of the more precisely defined salinities should be used with a vertical coordinate axis. There are standard names for the precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. 
Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + The phrase "square_of_X" means X*X. Sea surface salinity is the salt concentration of sea water close to the sea surface, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and the units attribute should normally be given as 1e-3 or 0.001 i.e. parts per thousand. Sea surface salinity is often abbreviated as "SSS". For the salinity of sea water at a particular depth or layer, a data variable of "sea_water_salinity" or one of the more precisely defined salinities should be used with a vertical coordinate axis. There are standard names for the precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. @@ -21341,14 +21545,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. 
It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. To specify which area is quantified by a variable with standard name area_fraction, provide a coordinate variable or scalar coordinate variable with standard name area_type. Alternatively, if one is defined, use a more specific standard name of X_area_fraction for the fraction of horizontal area occupied by X. The cloud area fraction is for the whole atmosphere column, as seen from the surface or the top of the atmosphere. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). Cloud area fraction is also called "cloud amount" and "cloud cover". 1 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). Cloud area fraction is also called "cloud amount" and "cloud cover". + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Layer" means any layer with upper and lower boundaries that have constant values in some vertical coordinate. There must be a vertical coordinate variable indicating the extent of the layer(s). If the layers are model layers, the vertical coordinate can be model_level_number, but it is recommended to specify a physical coordinate (in a scalar or auxiliary coordinate variable) as well. Standard names also exist for high, medium and low cloud types. 
Standard names referring only to "cloud_area_fraction" should be used for quantities for the whole atmosphere column. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). Cloud area fraction is also called "cloud amount" and "cloud cover". @@ -21376,14 +21580,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 62 - "Precipitation" in the earth's atmosphere means precipitation of water in all phases. Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Amount" means mass per unit area. + "Precipitation" in the earth's atmosphere means precipitation of water in all phases. Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Amount" means mass per unit area. kg m-2 s-1 - Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + Stratiform precipitation, whether liquid or frozen, is precipitation that formed in stratiform cloud. "Precipitation" in the earth's atmosphere means precipitation of water in all phases. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. @@ -21502,21 +21706,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 84 E174 - The surface called "surface" means the lower boundary of the atmosphere. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. To specify the nature of the surface a cell_methods attribute should be supplied as described in Chapter 7.3.3 of the CF Conventions. + The surface called "surface" means the lower boundary of the atmosphere. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. To specify the nature of the surface a cell_methods attribute should be supplied as described in Chapter 7.3.3 of the CF Conventions. 1 - The surface called "surface" means the lower boundary of the atmosphere. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. + The surface called "surface" means the lower boundary of the atmosphere. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. 1 - The surface called "surface" means the lower boundary of the atmosphere. 
Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. + The surface called "surface" means the lower boundary of the atmosphere. Albedo is the ratio of outgoing to incoming shortwave irradiance, where 'shortwave irradiance' means that both the incoming and outgoing radiation are integrated across the solar spectrum. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. @@ -21733,7 +21937,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "Downward" indicates a vector component which is positive when directed downward (negative upward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The surface called "surface" means the lower boundary of the atmosphere. The chemical formula for carbon dioxide is CO2. + "Downward" indicates a vector component which is positive when directed downward (negative upward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The surface called "surface" means the lower boundary of the atmosphere. The chemical formula for carbon dioxide is CO2. @@ -21887,14 +22091,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. 
By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - The surface called "surface" means the lower boundary of the atmosphere. Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Downwelling radiation is radiation from above. It does not mean "net downward". 
The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -22062,14 +22266,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - The surface called "surface" means the lower boundary of the atmosphere. Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 
A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - The surface called "surface" means the lower boundary of the atmosphere. Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Downwelling radiation is radiation from above. It does not mean "net downward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. 
By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -22237,14 +22441,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W m-2 - The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). Net downward radiation is the difference between radiation from above (downwelling) and radiation from below (upwelling). The term "longwave" means longwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". + The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). Net downward radiation is the difference between radiation from above (downwelling) and radiation from below (upwelling). The term "longwave" means longwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". W m-2 - The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). Net downward radiation is the difference between radiation from above (downwelling) and radiation from below (upwelling). The term "longwave" means longwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. 
"Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. + The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). Net downward radiation is the difference between radiation from above (downwelling) and radiation from below (upwelling). The term "longwave" means longwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. @@ -22286,14 +22490,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "Downward" indicates a vector component which is positive when directed downward (negative upward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "All land processes" means plant and soil respiration, photosynthesis, animal grazing, crop harvesting, natural fires and anthropogenic land use change. "Anthropogenic land use change" means human changes to land, excluding forest regrowth. It includes fires ignited by humans for the purpose of land use change and the processes of eventual disposal and decomposition of wood products such as paper, cardboard, furniture and timber for construction. 
The quantity with standard name surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes is equal to the difference between the quantities with standard names surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change and surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The chemical formula for carbon dioxide is CO2. + "Downward" indicates a vector component which is positive when directed downward (negative upward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "All land processes" means plant and soil respiration, photosynthesis, animal grazing, crop harvesting, natural fires and anthropogenic land use change. "Anthropogenic land use change" means human changes to land, excluding forest regrowth. It includes fires ignited by humans for the purpose of land use change and the processes of eventual disposal and decomposition of wood products such as paper, cardboard, furniture and timber for construction. The quantity with standard name surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes is equal to the difference between the quantities with standard names surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change and surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The chemical formula for carbon dioxide is CO2. kg m-2 s-1 - "Downward" indicates a vector component which is positive when directed downward (negative upward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "All land processes" means plant and soil respiration, photosynthesis, animal grazing, crop harvesting, natural fires and anthropogenic land use change. "Anthropogenic land use change" means human changes to land, excluding forest regrowth. It includes fires ignited by humans for the purpose of land use change and the processes of eventual disposal and decomposition of wood products such as paper, cardboard, furniture and timber for construction. 
The quantity with standard name surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes is equal to the difference between the quantities with standard names surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change and surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The chemical formula for carbon dioxide is CO2. + "Downward" indicates a vector component which is positive when directed downward (negative upward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "All land processes" means plant and soil respiration, photosynthesis, animal grazing, crop harvesting, natural fires and anthropogenic land use change. "Anthropogenic land use change" means human changes to land, excluding forest regrowth. It includes fires ignited by humans for the purpose of land use change and the processes of eventual disposal and decomposition of wood products such as paper, cardboard, furniture and timber for construction. The quantity with standard name surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes is equal to the difference between the quantities with standard names surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change and surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The chemical formula for carbon dioxide is CO2. @@ -22307,14 +22511,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W m-2 - The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). The term "shortwave" means shortwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. 
To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". + The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). The term "shortwave" means shortwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". W m-2 - The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). The term "shortwave" means shortwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. + The surface called "surface" means the lower boundary of the atmosphere. "Downward" indicates a vector component which is positive when directed downward (negative upward). The term "shortwave" means shortwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. 
"Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. @@ -22342,7 +22546,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Anthropogenic land use change" means human changes to land, excluding forest regrowth. It includes fires ignited by humans for the purpose of land use change and the processes of eventual disposal and decomposition of wood products such as paper, cardboard, furniture and timber for construction. The quantity with standard name surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes is equal to the difference between the quantities with standard names surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change and surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. + "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Anthropogenic land use change" means human changes to land, excluding forest regrowth. It includes fires ignited by humans for the purpose of land use change and the processes of eventual disposal and decomposition of wood products such as paper, cardboard, furniture and timber for construction. 
The quantity with standard name surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes is equal to the difference between the quantities with standard names surface_net_downward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_all_land_processes_excluding_anthropogenic_land_use_change and surface_net_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_emission_from_anthropogenic_land_use_change. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. @@ -25051,7 +25255,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 snc - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Surface snow refers to the snow on the solid ground or on surface ice cover, but excludes, for example, falling snowflakes and snow on plants. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Surface snow refers to the snow on the solid ground or on surface ice cover, but excludes, for example, falling snowflakes and snow on plants. @@ -25233,7 +25437,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The chemical formula for carbon dioxide is CO2. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
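The revised area_fraction wording above (snc) allows cell_methods to restrict the evaluation to a portion of the grid cell, e.g. "where sea_ice". A minimal sketch of how that looks on a data variable, again assuming netCDF4-python; names and sizes are illustrative.

    import netCDF4
    import numpy as np

    # Illustrative only: a snow area fraction evaluated over the sea-ice
    # portion of each cell, recorded via the cell_methods "where" syntax.
    ds = netCDF4.Dataset("snc_where_sea_ice.nc", "w", format="NETCDF4")
    ds.createDimension("lat", 2)
    ds.createDimension("lon", 2)

    snc = ds.createVariable("snc", "f4", ("lat", "lon"))
    snc.standard_name = "surface_snow_area_fraction"
    snc.units = "1"
    snc.cell_methods = "area: mean where sea_ice"  # fraction of the sea-ice portion only
    snc[:] = np.array([[0.2, 0.0], [0.7, 1.0]], dtype="f4")

    ds.close()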
The definition of "crop" is model dependent, for example, some models may include fruit trees, trees grown for timber or other types of agricultural and forestry planting as crops. Crop harvesting means the human activity of collecting plant materials for the purpose of turning them into forestry or agricultural products. + The surface called "surface" means the lower boundary of the atmosphere. "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The chemical formula for carbon dioxide is CO2. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The definition of "crop" is model dependent, for example, some models may include fruit trees, trees grown for timber or other types of agricultural and forestry planting as crops. Crop harvesting means the human activity of collecting plant materials for the purpose of turning them into forestry or agricultural products. @@ -25247,14 +25451,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. "Fires excluding anthropogenic land use change" means all natural fires and human ignited fires that are not associated with change of land use. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. "Anthropogenic" means influenced, caused, or created by human activity. + "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
"Fires excluding anthropogenic land use change" means all natural fires and human ignited fires that are not associated with change of land use. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. "Anthropogenic" means influenced, caused, or created by human activity. kg m-2 s-1 - "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The surface called "surface" means the lower boundary of the atmosphere. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. + "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The surface called "surface" means the lower boundary of the atmosphere. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. @@ -25275,7 +25479,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The surface called "surface" means the lower boundary of the atmosphere. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. + "Upward" indicates a vector component which is positive when directed upward (negative downward). The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The surface called "surface" means the lower boundary of the atmosphere. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. @@ -25488,11 +25692,18 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. + + W/m2 + + + The surface called "surface" means the lower boundary of the atmosphere. The term "longwave" means longwave radiation. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 
A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. The 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + + W/m2 - The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "longwave" means longwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. 
By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -25597,14 +25808,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - The surface called "surface" means the lower boundary of the atmosphere. 
Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The surface called "surface" means the lower boundary of the atmosphere. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -25849,21 +26060,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. K s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. 
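One reading of the ozone-reference procedure described in the entries above, as a worked difference. This is a sketch with made-up numpy values, not code from the patch, and the exact differencing and sign convention should come from the experiment design rather than from this example.

    import numpy as np

    # Illustrative only.  In each simulation the radiation scheme is called
    # twice: prognostically (model ozone) and diagnostically (reference ozone).
    flux_prog_sim1 = np.array([240.0, 250.0])   # W m-2, prognostic call, run 1
    flux_ref_sim1 = np.array([239.0, 248.5])    # W m-2, diagnostic call, run 1
    flux_prog_sim2 = np.array([243.0, 252.0])   # W m-2, prognostic call, run 2
    flux_ref_sim2 = np.array([239.5, 249.0])    # W m-2, diagnostic call, run 2

    # Per-run difference isolates the radiative effect of each run's ozone
    # relative to the common reference ozone field.
    delta_sim1 = flux_prog_sim1 - flux_ref_sim1
    delta_sim2 = flux_prog_sim2 - flux_ref_sim2

    # Differencing the two runs then gives an instantaneous radiative forcing
    # attributable to the ozone change between them.
    irf_ozone = delta_sim2 - delta_sim1
    print(irf_ozone)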
A variable with the standard name tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation should contain net latent heating effects of all processes which convert stratiform clouds and precipitation between water vapor, liquid or ice phases. It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). + The phrase "tendency_of_X" means derivative of X with respect to time. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. A variable with the standard name tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation should contain net latent heating effects of all processes which convert stratiform clouds and precipitation between water vapor, liquid or ice phases. It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). K s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. 
https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). + The phrase "tendency_of_X" means derivative of X with respect to time. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). K s-1 tntlsp - The phrase "tendency_of_X" means derivative of X with respect to time. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). + The phrase "tendency_of_X" means derivative of X with respect to time. Air temperature is the bulk temperature of the air, not the surface (skin) temperature. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. 
It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). @@ -25947,63 +26158,63 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
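The temperature-tendency entries above (e.g. tntlsp) recommend the units_metadata attribute so that unit conversion treats the values as temperature differences rather than on-scale temperatures. A minimal sketch of setting it, assuming netCDF4-python; the file name and data values are illustrative.

    import netCDF4
    import numpy as np

    # Illustrative only: a temperature tendency flagged as a temperature
    # difference via units_metadata, per the recommendation above.
    ds = netCDF4.Dataset("tntlsp.nc", "w", format="NETCDF4")
    ds.createDimension("time", 1)

    tntlsp = ds.createVariable("tntlsp", "f4", ("time",))
    tntlsp.standard_name = (
        "tendency_of_air_temperature_due_to_stratiform_cloud_and_precipitation"
    )
    tntlsp.units = "K s-1"
    tntlsp.units_metadata = "temperature: difference"
    tntlsp[:] = np.array([1.5e-6], dtype="f4")

    ds.close()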
Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. 
The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. 
The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. 
In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. 
Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Alcohols include all organic compounds with an alcoholic (OH) group. In standard names "alcohols" is the term used to describe the group of chemical species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -26045,7 +26256,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
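[Editor's aside, not part of the table diff.] The recurring guidance in the entries above is that a data variable carrying one of these emission-flux standard names should use the canonical units kg m-2 s-1 and carry a comment attribute naming the IPCC source categories. A minimal netCDF4-python sketch of that convention follows; the file name, variable name, dimensions and the particular standard name are illustrative assumptions, not values taken from the table itself.

    # Sketch only: illustrates the standard_name / units / comment convention
    # described in the table entries above. The standard name used here is an
    # assumed example of the
    # "tendency_of_atmosphere_mass_content_of_X_due_to_emission_from_Y" family;
    # check the published table for the exact spelling before relying on it.
    import numpy as np
    from netCDF4 import Dataset

    with Dataset("nh3_forest_fire_emission.nc", "w") as ds:
        ds.createDimension("latitude", 2)
        ds.createDimension("longitude", 3)
        flux = ds.createVariable("nh3_emission", "f4", ("latitude", "longitude"))
        flux.standard_name = (
            "tendency_of_atmosphere_mass_content_of_ammonia_"
            "due_to_emission_from_forest_fires"
        )
        flux.units = "kg m-2 s-1"  # canonical units given in the table
        flux.comment = (
            "IPCC (Intergovernmental Panel on Climate Change) source category 5 "
            "as defined in the 2006 IPCC guidelines for national greenhouse gas "
            "inventories"
        )
        flux[:] = np.zeros((2, 3), dtype="f4")

A CF-aware reader such as iris should then pick up the standard_name and units as coordinate/cube metadata when the file is loaded, with the comment attribute travelling alongside as ordinary variable metadata.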
@@ -26066,7 +26277,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for alpha_pinene is C10H16. The IUPAC name for alpha-pinene is (1S,5S)-2,6,6-trimethylbicyclo[3.1.1]hept-2-ene.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for alpha_pinene is C10H16. The IUPAC name for alpha-pinene is (1S,5S)-2,6,6-trimethylbicyclo[3.1.1]hept-2-ene.
@@ -26080,63 +26291,63 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface).
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. 
The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. he chemical formula for ammonia is NH3. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". 
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. he chemical formula for ammonia is NH3. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ammonia is NH3. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
The chemical formula for ammonia is NH3. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -26171,77 +26382,77 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species.
kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules.
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. 
"Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. 
Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. 
The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. 
The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for beta_pinene is C10H16. The IUPAC name for beta-pinene is (1S,5S)-6,6-dimethyl-2-methylenebicyclo[3.1.1]heptane.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for beta_pinene is C10H16. The IUPAC name for beta-pinene is (1S,5S)-6,6-dimethyl-2-methylenebicyclo[3.1.1]heptane.
@@ -26255,91 +26466,91 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface).
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. 
"Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. 
"Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. 
The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. 
Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. 
"Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. 
The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. 
There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for butane is C4H10. Butane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. 
The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. "Anthropogenic" means influenced, caused, or created by human activity. 
Anthropogenic emission of carbon dioxide includes fossil fuel use, cement production, agricultural burning and sources associated with anthropogenic land use change, except forest regrowth.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. "Anthropogenic" means influenced, caused, or created by human activity. Anthropogenic emission of carbon dioxide includes fossil fuel use, cement production, agricultural burning and sources associated with anthropogenic land use change, except forest regrowth.
@@ -26353,7 +26564,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. Fossil fuel combustion includes cement production and flaring of natural gas.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon dioxide is CO2. Fossil fuel combustion includes cement production and flaring of natural gas.
@@ -26367,168 +26578,168 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used.
The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. 
earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. 
It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. 
It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon monoxide is CO. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon monoxide is CO. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon monoxide is CO. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for carbon monoxide is CO. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
The chemical formula of carbon monoxide is CO. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon monoxide is CO. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon tetrachloride is CCl4. The IUPAC name for carbon tetrachloride is tetrachloromethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of carbon tetrachloride is CCl4. The IUPAC name for carbon tetrachloride is tetrachloromethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC113a is CCl3CF3. The IUPAC name for CFC113a is 1,1,1-trichloro-2,2,2-trifluoroethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC113a is CCl3CF3. The IUPAC name for CFC113a is 1,1,1-trichloro-2,2,2-trifluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC113 is CCl2FCClF2. The IUPAC name for CFC113 is 1,1,2-trichloro-1,2,2-trifluoroethane. 
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC113 is CCl2FCClF2. The IUPAC name for CFC113 is 1,1,2-trichloro-1,2,2-trifluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC114 is CClF2CClF2. The IUPAC name for CFC114 is 1,2-dichloro-1,1,2,2-tetrafluoroethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC114 is CClF2CClF2. The IUPAC name for CFC114 is 1,2-dichloro-1,1,2,2-tetrafluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer are used". The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC115 is CClF2CF3. The IUPAC name for CFC115 is 1-chloro-1,1,2,2,2-pentafluoroethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer are used". The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC115 is CClF2CF3. The IUPAC name for CFC115 is 1-chloro-1,1,2,2,2-pentafluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC11 is CFCl3. The IUPAC name for CFC11 is trichloro(fluoro)methane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula of CFC11 is CFCl3. The IUPAC name for CFC11 is trichloro(fluoro)methane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. 
"Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for CFC12 is CF2Cl2. The IUPAC name for CFC12 is dichloro(difluoro)methane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for CFC12 is CF2Cl2. The IUPAC name for CFC12 is dichloro(difluoro)methane. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. 
"Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. 
As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. 
Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. 
"Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Chlorinated hydrocarbons are a group of chemicals composed of carbon, chlorine and hydrogen. As pesticides, they are also referred to by several other names, including chlorinated organics, chlorinated insecticides and chlorinated synthetics. In standard names "chlorinated_hydrocarbons" is the term used to describe the group of chlorinated hydrocarbon species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". @@ -26542,21 +26753,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for dimethyl sulfide is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for dimethyl sulfide is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". 
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for dimethyl sulfide is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for dimethyl sulfide is (CH3)2S. Dimethyl sulfide is sometimes referred to as DMS. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -26710,308 +26921,308 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area.
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Esters in organic chemistry are chemical compounds derived by reacting an oxoacid with a hydroxyl compound such as an alcohol or phenol. Esters are usually derived from an inorganic acid or organic acid in which at least one -OH (hydroxyl) group is replaced by an -O-alkyl (alkoxy) group, and most commonly from carboxylic acids and alcohols. That is, esters are formed by condensing an acid with an alcohol. In standard names "esters" is the term used to describe the group of ester species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Esters in organic chemistry are chemical compounds derived by reacting an oxoacid with a hydroxyl compound such as an alcohol or phenol. Esters are usually derived from an inorganic acid or organic acid in which at least one -OH (hydroxyl) group is replaced by an -O-alkyl (alkoxy) group, and most commonly from carboxylic acids and alcohols. That is, esters are formed by condensing an acid with an alcohol. In standard names "esters" is the term used to describe the group of ester species that are represented within a given model. 
The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Esters in organic chemistry are chemical compounds derived by reacting an oxoacid with a hydroxyl compound such as an alcohol or phenol. Esters are usually derived from an inorganic acid or organic acid in which at least one -OH (hydroxyl) group is replaced by an -O-alkyl (alkoxy) group, and most commonly from carboxylic acids and alcohols. That is, esters are formed by condensing an acid with an alcohol. In standard names "esters" is the term used to describe the group of ester species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. 
The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Esters in organic chemistry are chemical compounds derived by reacting an oxoacid with a hydroxyl compound such as an alcohol or phenol. Esters are usually derived from an inorganic acid or organic acid in which at least one -OH (hydroxyl) group is replaced by an -O-alkyl (alkoxy) group, and most commonly from carboxylic acids and alcohols. That is, esters are formed by condensing an acid with an alcohol. In standard names "esters" is the term used to describe the group of ester species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Esters in organic chemistry are chemical compounds derived by reacting an oxoacid with a hydroxyl compound such as an alcohol or phenol. Esters are usually derived from an inorganic acid or organic acid in which at least one -OH (hydroxyl) group is replaced by an -O-alkyl (alkoxy) group, and most commonly from carboxylic acids and alcohols. That is, esters are formed by condensing an acid with an alcohol. In standard names "esters" is the term used to describe the group of ester species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. 
Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Esters in organic chemistry are chemical compounds derived by reacting an oxoacid with a hydroxyl compound such as an alcohol or phenol. Esters are usually derived from an inorganic acid or organic acid in which at least one -OH (hydroxyl) group is replaced by an -O-alkyl (alkoxy) group, and most commonly from carboxylic acids and alcohols. That is, esters are formed by condensing an acid with an alcohol. In standard names "esters" is the term used to describe the group of ester species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. 
"Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. 
There are standard names for the alkane group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. 
"Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. 
"Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. 
The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. 
The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethane is C2H6. Ethane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethanol is C2H5OH. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethanol is C2H5OH.
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species.
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4.
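Taken together, the definitional sentences repeated in these entries fix the quantity precisely. As a rough sketch in symbols (the notation is illustrative, not taken from the table), for a species with mass concentration \rho_s:

    M(t) = \int_{z_{\mathrm{sfc}}}^{\mathrm{TOA}} \rho_s(z, t)\,\mathrm{d}z
        \quad [\mathrm{kg\;m^{-2}}]          % "atmosphere content": vertical integral per unit area
    \mathrm{tendency\_of\_}X = \frac{\partial X}{\partial t}          % derivative with respect to time
    \frac{\partial M}{\partial t}
        = \sum_{p} \left. \frac{\partial M}{\partial t} \right|_{\mathrm{due\_to\_}p}
        \quad [\mathrm{kg\;m^{-2}\;s^{-1}}]  % each "due_to_<process>" entry is one term of this sum

which is consistent with the canonical units kg m-2 s-1 given for every entry in this block.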
Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. 
earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. 
There are standard names for the alkene group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. 
The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. 
The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for ethene is C2H4. Ethene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
This stretch of the diff runs through the standard name descriptions for the tendency-of-atmosphere-mass-content-due-to-emission quantities, species by species and source sector by source sector. For each entry the canonical units, kg m-2 s-1, are kept as context while the description text is removed and re-added with minor rewording. The descriptions share the following wording:

  The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.

Each sector-specific description also states that a variable carrying that standard_name should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example the 2006 IPCC guidelines for national greenhouse gas inventories.

The species covered in this part of the hunk are:

  * ethene: chemical formula C2H4; a member of the alkenes, for which there are standard names for the group as well as for some individual species.
  * ethers: organic compounds containing an ether group (an oxygen atom connected to two alkyl or aryl groups), of general formula R-O-R. In standard names "ethers" denotes the group of ether species represented within a given model; since that list can vary between models, the data variable should, where possible, be accompanied by a complete description of the species represented, for example by using a comment attribute.

The source sectors covered here, with their IPCC source categories, are:

  * waste treatment and disposal: solid waste disposal on land, wastewater handling, waste incineration and other waste disposal (IPCC 6A, 6B, 6C and 6D).
  * agricultural production: enteric fermentation, manure management, rice cultivation, agricultural soils and other; may also include any not-classified or "other" combustion commonly included in agriculture-related inventory data (IPCC 4A, 4B, 4C, 4D and 4G).
  * forest fires: burning (natural and human-induced) of living or dead vegetation in forests (IPCC source category 5).
  * land transport: fuel combustion activities related to road transportation, railways and other transportation (IPCC 1A3b, 1A3c and 1A3e).
  * residential and commercial combustion: fuel combustion activities related to the commercial/institutional, residential and agriculture/forestry/fishing sectors; may also include any not-classified or "other" combustion (IPCC 1A4a, 1A4b and 1A4c).
  * savanna and grassland fires: burning (natural and human-induced) of living or dead vegetation in non-forested areas, excluding field burning of agricultural residues (IPCC source category 5).
  * solvent production and use: industrial processes related to the consumption of halocarbons, SF6, solvent and other product use (IPCC 2F and 3).
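The two recurring definitions, the time tendency and the per-unit-area "atmosphere content", are easier to check against the canonical units once written out. A minimal sketch in LaTeX, using an illustrative constituent mass mixing ratio q and air density rho that are assumptions of this sketch rather than wording from the table:

    % Sketch of the recurring definitions; q and \rho are illustrative
    % symbols, not taken from the standard name descriptions.
    \[
      \mathrm{tendency\_of\_}X = \frac{\partial X}{\partial t},
      \qquad
      \text{atmosphere mass content of a constituent}
        = \int_{\text{surface}}^{\text{top of atmosphere}} \rho \, q \,\mathrm{d}z
        \quad [\mathrm{kg\,m^{-2}}].
    \]

Taking the time derivative of a per-unit-area content in kg m-2 then gives the canonical units kg m-2 s-1 quoted for every entry in this hunk.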
The same treatment is applied to the ethyne entries. The chemical formula for ethyne is HC2H; ethyne is the IUPAC name for this species, which is also commonly known as acetylene. The entries in this span are the plain emission quantity together with the sector-specific variants for agricultural production, agricultural waste burning, energy production and distribution, forest fires, industrial processes and combustion, land transport, maritime transport, residential and commercial combustion, savanna and grassland fires, and waste treatment and disposal, each again with canonical units kg m-2 s-1, the shared emission wording quoted above and the comment-attribute guidance. The sectors not already defined above are:

  * agricultural waste burning: field burning of agricultural residues (IPCC source category 4F).
  * energy production and distribution: fuel combustion activities related to energy industries and fugitive emissions from fuels; may also include any not-classified or "other" combustion commonly included in energy-related inventory data (IPCC 1A1 and 1B).
  * industrial processes and combustion: fuel combustion activities related to manufacturing industries and construction, plus industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes; may also include any not-classified or "other" combustion commonly included in industry-related inventory data (IPCC 1A2, 2A, 2B, 2C, 2D and 2G).
  * maritime transport: fuel combustion activities related to maritime transport (IPCC source category 1A3d).
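Every sector-specific entry above asks that a data variable using one of these standard names also carry a comment attribute naming the IPCC source categories. As a rough sketch of what that looks like when a file is written with the netCDF4-python library (the file name, variable name, dimension sizes and the exact standard_name spelling below are illustrative assumptions, not taken from the patch):

import numpy as np
import netCDF4

# Illustrative only: one sector-specific emission field with the canonical
# units and the recommended free-text comment attribute.
with netCDF4.Dataset("emissions_example.nc", "w") as ds:
    ds.createDimension("lat", 3)
    ds.createDimension("lon", 4)

    flux = ds.createVariable("ethyne_emission_waste", "f4", ("lat", "lon"))
    # The standard_name string follows the pattern described in the entries
    # above; check the published CF standard name table for the exact
    # spelling before relying on it.
    flux.standard_name = (
        "tendency_of_atmosphere_mass_content_of_ethyne_"
        "due_to_emission_from_waste_treatment_and_disposal"
    )
    flux.units = "kg m-2 s-1"
    # Comment attribute listing the source categories and the categorization
    # scheme, as the descriptions recommend.
    flux.comment = (
        "IPCC (Intergovernmental Panel on Climate Change) source categories "
        "6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for "
        "national greenhouse gas inventories"
    )
    flux[:] = np.zeros((3, 4), dtype="f4")

The comment is free text; the descriptions only ask that it list the categories and reference the categorization scheme used.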
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -27025,70 +27236,70 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal.
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used.
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. 
"Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". 
kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. 
"Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for formaldehyde is CH2O. The IUPAC name for formaldehyde is methanal. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". @@ -27123,7 +27334,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. "Divalent mercury" means all compounds in which the mercury has two binding sites to other ion(s) in a salt or to other atom(s) in a molecule. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. "Divalent mercury" means all compounds in which the mercury has two binding sites to other ion(s) in a salt or to other atom(s) in a molecule. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. @@ -27144,7 +27355,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. @@ -27158,56 +27369,56 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon1202 is CBr2F2. The IUPAC name for Halon1202 is dibromo(difluoro)methane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon1202 is CBr2F2. The IUPAC name for Halon1202 is dibromo(difluoro)methane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon1211 is CBrClF2. The IUPAC name for Halon1211 is bromo-chloro-difluoromethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon1211 is CBrClF2. The IUPAC name for Halon1211 is bromo-chloro-difluoromethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon1301 is CBrF3. The IUPAC name for Halon1301 is bromo(trifluoro)methane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon1301 is CBrF3. The IUPAC name for Halon1301 is bromo(trifluoro)methane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for Halon2402 is C2Br2F4. The IUPAC name for Halon2402 is 1,2-dibromo-1,1,2,2-tetrafluoroethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
The chemical formula for Halon2402 is C2Br2F4. The IUPAC name for Halon2402 is 1,2-dibromo-1,1,2,2-tetrafluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for HCC140a, also called methyl chloroform, is CH3CCl3. The IUPAC name for HCC140a is 1,1,1-trichloroethane. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for HCC140a, also called methyl chloroform, is CH3CCl3. The IUPAC name for HCC140a is 1,1,1-trichloroethane. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for HCFC141b is CH3CCl2F. The IUPAC name for HCFC141b is 1,1-dichloro-1-fluoroethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for HCFC141b is CH3CCl2F. The IUPAC name for HCFC141b is 1,1-dichloro-1-fluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for HCFC142b is CH3CClF2. The IUPAC name for HCFC142b is 1-chloro-1,1-difluoroethane. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for HCFC142b is CH3CClF2. The IUPAC name for HCFC142b is 1-chloro-1,1-difluoroethane. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for HCFC22 is CHClF2. The IUPAC name for HCFC22 is chloro(difluoro)methane. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for HCFC22 is CHClF2. The IUPAC name for HCFC22 is chloro(difluoro)methane. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. @@ -27221,7 +27432,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for hexachlorobiphenyl is C12H4Cl6. The structure of this species consists of two linked benzene rings, each of which is additionally bonded to three chlorine atoms. + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for hexachlorobiphenyl is C12H4Cl6. The structure of this species consists of two linked benzene rings, each of which is additionally bonded to three chlorine atoms. @@ -27249,7 +27460,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for hydrogen cyanide is HCN.
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for hydrogen cyanide is HCN.
@@ -27277,98 +27488,98 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase.
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The chemical formula for isoprene is CH2=C(CH3)CH=CH2. The IUPAC name for isoprene is 2-methylbuta-1,3-diene. Isoprene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. 
"Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. 
In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. 
The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. 
Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. 
"Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. 
"Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. 
In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. 
"Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. 
Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. In organic chemistry, a ketone is a compound with the structure RC(=O)R', where R and R' can be a variety of atoms and groups of atoms. It features a carbonyl group (C=O) bonded to two other carbon atoms. Acetone is the simplest example of a ketone. In standard names "ketones" is the term used to describe the group of ketone species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The chemical formula for limonene is C10H16. The IUPAC name for limonene is 1-methyl-4-prop-1-en-2-ylcyclohexene. Limonene is a member of the group of hydrocarbons known as terpenes. 
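[Editorial note, not part of the table: combining the definitions above, and writing rho_X(z) for the in-air mass density of the species (an assumed symbol, not one used by the table), the canonical units kg m-2 s-1 follow directly.]

    \text{atmosphere mass content of } X
      = \int_{\text{surface}}^{\text{top of atmosphere}} \rho_X(z)\,\mathrm{d}z
      \qquad [\,\mathrm{kg\ m^{-3}} \times \mathrm{m} = \mathrm{kg\ m^{-2}}\,]

    \text{its tendency}
      = \frac{\mathrm{d}}{\mathrm{d}t}\int_{\text{surface}}^{\text{top of atmosphere}} \rho_X(z)\,\mathrm{d}z
      \qquad [\,\mathrm{kg\ m^{-2}\ s^{-1}}\,]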
There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The chemical formula for limonene is C10H16. The IUPAC name for limonene is 1-methyl-4-prop-1-en-2-ylcyclohexene. Limonene is a member of the group of hydrocarbons known as terpenes. There are standard names for the terpene group as well as for some of the individual species. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names.
@@ -27396,77 +27607,77 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species.
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. he chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. he chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methane is CH4. Methane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -27494,21 +27705,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methanol is CH3OH.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methanol is CH3OH.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methyl bromide is CH3Br. The IUPAC name for methyl bromide is bromomethane.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methyl bromide is CH3Br. The IUPAC name for methyl bromide is bromomethane.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methyl chloride is CH3Cl. The IUPAC name for methyl chloride is chloromethane.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for methyl chloride is CH3Cl. The IUPAC name for methyl chloride is chloromethane.
@@ -27522,21 +27733,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for molecular hydrogen is H2.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for molecular hydrogen is H2.
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for molecular hydrogen is H2. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for molecular hydrogen is H2. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for molecular hydrogen is H2. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for molecular hydrogen is H2. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -27627,98 +27838,98 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen dioxide is NO2.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen dioxide is NO2.
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen dioxide is NO2. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen dioxide is NO2. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen dioxide is NO2. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen dioxide is NO2. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO.
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
 kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. 
It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrogen monoxide is NO. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". @@ -27732,7 +27943,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrous acid is HNO2.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrous acid is HNO2.
@@ -27753,7 +27964,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrous oxide is N2O.
+ "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for nitrous oxide is N2O.
@@ -27851,70 +28062,70 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
[Diff hunk over CF standard-name table entries: the description lines for the NOx emission tendency entries are removed and re-added with identical wording. The hunk covers the generic NOx content entry ("Nox" means nitric oxide (NO) and nitrogen dioxide (NO2)) and the NOx "expressed_as" entries for the "agricultural production", "agricultural waste burning", "energy production and distribution", "forest fires", "industrial processes and combustion", "land transport", "maritime transport", "residential and commercial combustion" and "savanna and grassland fires" sectors ("Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2); each entry keeps canonical units of kg m-2 s-1.]
@@ -27928,7 +28139,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area.
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". 
@@ -27949,56 +28160,56 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.

The entries updated in this hunk describe the tendency of the atmosphere mass content of organic acids due to emission from individual source sectors, each with canonical units of kg m-2 s-1 and the following shared description text:

"tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. An organic acid is an organic compound with acidic properties. The most common organic acids are the carboxylic acids, whose acidity is associated with their carboxyl group -COOH. In standard names "organic_acids" is the term used to describe the group of organic acid species that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute.

As above, each entry defines its sector and recommends a comment attribute listing the IPCC source categories:

- "Agricultural production": the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other, possibly including any not-classified or "other" combustion commonly included in agriculture-related inventory data; IPCC source categories 4A, 4B, 4C, 4D and 4G.
- "Agricultural waste burning": field burning of agricultural residues; IPCC source category 4F.
- "Energy production and distribution": fuel combustion activities related to energy industries and fugitive emissions from fuels, possibly including any not-classified or "other" combustion commonly included in energy-related inventory data; IPCC source categories 1A1 and 1B.
- "Forest fires": the burning (natural and human-induced) of living or dead vegetation in forests; IPCC source category 5.
- "Industrial processes and combustion": fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes, possibly including any not-classified or "other" combustion commonly included in industry-related inventory data; IPCC source categories 1A2, 2A, 2B, 2C, 2D and 2G.
- "Residential and commercial combustion": fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector, possibly including any not-classified or "other" combustion commonly included in the inventory data; IPCC source categories 1A4a, 1A4b and 1A4c.
- "Savanna and grassland fires": burning (natural and human-induced) of living or dead vegetation in non-forested areas, excluding field burning of agricultural residues; IPCC source category 5.
- "Waste treatment and disposal": solid waste disposal on land, wastewater handling, waste incineration and other waste disposal; IPCC source categories 6A, 6B, 6C and 6D.
@@ -28194,63 +28405,63 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.

The entries updated in this hunk describe the tendency of the atmosphere mass content of pentane due to emission from individual source sectors, again with canonical units of kg m-2 s-1 and a shared description text:

"tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for pentane is C5H12. Pentane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species.

The sector-specific wording matches that given above for the corresponding sectors, together with the recommended comment attribute listing the IPCC source categories:

- "Agricultural production": IPCC source categories 4A, 4B, 4C, 4D and 4G.
- "Agricultural waste burning": IPCC source category 4F.
- "Forest fires": IPCC source category 5.
- "Industrial processes and combustion": IPCC source categories 1A2, 2A, 2B, 2C, 2D and 2G.
- "Land transport": fuel combustion activities related to road transportation, railways and other transportation; IPCC source categories 1A3b, 1A3c and 1A3e.
- "Maritime transport": fuel combustion activities related to maritime transport; IPCC source category 1A3d.
"Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for pentane is C5H12. Pentane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for pentane is C5H12. Pentane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. 
The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for pentane is C5H12. Pentane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for pentane is C5H12. Pentane is a member of the group of hydrocarbons known as alkanes. 
There are standard names for the alkane group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. 
"Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". @@ -28418,161 +28629,161 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. 
@@ -28418,161 +28629,161 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.

The second hunk covers the corresponding propane and propene entries (tendency_of_atmosphere_mass_content_of_propane_due_to_emission, ..._of_propane_due_to_emission_from_<sector>, and likewise for propene; canonical units kg m-2 s-1). The descriptions repeat the common text quoted above, substituting the chemical statement: The chemical formula for propane is C3H8. Propane is a member of the group of hydrocarbons known as alkanes. There are standard names for the alkane group as well as for some of the individual species. For propene: The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. (The base due_to_emission entries, which carry no sector, use the same sentences in a slightly different order and omit "The mass is the total mass of the molecules".)

The propane entries span the sectors already listed plus three further ones, each again closing with the comment-attribute guidance and its IPCC reference:

- "agricultural production": the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other, possibly including any not-classified or "other" combustion commonly included in agriculture-related inventory data; IPCC source categories 4A, 4B, 4C, 4D and 4G.
- "energy production and distribution": fuel combustion activities related to energy industries and fugitive emissions from fuels, possibly including any not-classified or "other" combustion commonly included in energy-related inventory data; IPCC source categories 1A1 and 1B.
- "waste treatment and disposal": solid waste disposal on land, wastewater handling, waste incineration and other waste disposal; IPCC source categories 6A, 6B, 6C and 6D.

The propene entries visible in this hunk cover emission in general and the "agricultural production", "agricultural waste burning" and "energy production and distribution" sectors, with the same sector definitions and IPCC source categories as above.
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. 
"Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for propene is C3H6. Propene is a member of the group of hydrocarbons known as alkenes. There are standard names for the alkene group as well as for some of the individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical symbol for radon is Rn. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical symbol for radon is Rn. @@ -28740,70 +28951,70 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. 
The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. 
The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. 
"Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for sulfur dioxide is SO2. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". @@ -28817,105 +29028,105 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Terpenes are hydrocarbons, that is, they contain only hydrogen and carbon combined in the general proportions (C5H8)n where n is an integer greater than on equal to one. The term "terpenes" is used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual terpene species, e.g., isoprene and limonene. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Terpenes are hydrocarbons, that is, they contain only hydrogen and carbon combined in the general proportions (C5H8)n where n is an integer greater than on equal to one. The term "terpenes" is used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual terpene species, e.g., isoprene and limonene. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Terpenes are hydrocarbons, that is, they contain only hydrogen and carbon combined in the general proportions (C5H8)n where n is an integer greater than on equal to one. 
The term "terpenes" is used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual terpene species, e.g., isoprene and limonene. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. Terpenes are hydrocarbons, that is, they contain only hydrogen and carbon combined in the general proportions (C5H8)n where n is an integer greater than on equal to one. The term "terpenes" is used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual terpene species, e.g., isoprene and limonene. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "Content" indicates a quantity per unit area. 
The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. 
"Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. 
Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. 
The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. 
It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. 
The IUPAC name for toluene is methylbenzene. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. 
The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. 
"Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for trimethylbenzene is C9H12. The IUPAC names for trimethylbenzene is 1,3,5-trimethylbenzene. Trimethylbenzene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. 
It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for trimethylbenzene is C9H12. The IUPAC names for trimethylbenzene is 1,3,5-trimethylbenzene. Trimethylbenzene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories".
@@ -28992,84 +29203,84 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
 kg m-2 s-1
- "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene.
The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. + "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including content_of_atmosphere_layer are used. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. 
"Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural production" sector comprises the agricultural processes of enteric fermentation, manure management, rice cultivation, agricultural soils and other. It may also include any not-classified or "other" combustion, which is commonly included in agriculture-related inventory data. "Agricultural production" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 4A, 4B, 4C, 4D and 4G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "agricultural waste burning" sector comprises field burning of agricultural residues. "Agricultural waste burning" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 4F as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. 
It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "energy production and distribution" sector comprises fuel combustion activities related to energy industries and fugitive emissions from fuels. It may also include any not-classified or "other" combustion, which is commonly included in energy-related inventory data. "Energy production and distribution" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A1 and 1B as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. 
In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "forest fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in forests. "Forest fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "industrial processes and combustion" sector comprises fuel combustion activities related to manufacturing industries and construction, industrial processes related to mineral products, the chemical industry, metal production, the production of pulp, paper, food and drink, and non-energy industry use of lubricants and waxes. It may also include any not-classified or "other" combustion, which is commonly included in industry-related inventory data. "Industrial processes and combustion" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A2, 2A, 2B, 2C, 2D and 2G as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. 
The "land transport" sector includes fuel combustion activities related to road transportation, railways and other transportation. "Land transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A3b, 1A3c and 1A3e as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. 
There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "maritime transport" sector includes fuel combustion activities related to maritime transport. "Maritime transport" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3d as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). 
"Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "residential and commercial combustion" sector comprises fuel combustion activities related to the commercial/institutional sector, the residential sector and the agriculture/forestry/fishing sector. It may also include any not-classified or "other" combustion, which is commonly included in the inventory data. "Residential and commercial combustion" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 1A4a, 1A4b and 1A4c as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. 
For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "savanna and grassland fires" sector comprises the burning (natural and human-induced) of living or dead vegetation in non-forested areas. It excludes field burning of agricultural residues. "Savanna and grassland fires" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 5 as defined in the 2006 IPCC guidelines for national greenhouse gas Inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". 
+ "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "solvent production and use" sector comprises industrial processes related to the consumption of halocarbons, SF6, solvent and other product use. "Solvent production and use" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 2F and 3 as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". kg m-2 s-1 - "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. 
A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". + "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The "atmosphere content" of a quantity refers to the vertical integral from the surface to the top of the atmosphere. For the content between specified levels in the atmosphere, standard names including "content_of_atmosphere_layer" are used. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. The "waste treatment and disposal" sector comprises solid waste disposal on land, wastewater handling, waste incineration and other waste disposal. "Waste treatment and disposal" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source categories 6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". @@ -29475,7 +29686,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. mol s-1 - "tendency_of_X" means derivative of X with respect to time. The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + "tendency_of_X" means derivative of X with respect to time. The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
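The advice repeated in these entries, that a variable whose standard_name names an emission sector should also carry a comment attribute listing the IPCC source categories and the categorization scheme, is simple to apply when writing a file. Below is a minimal sketch using the netCDF4-python library; the file name, dimension sizes, variable name and the particular standard name are illustrative assumptions rather than values taken from the table.

    # Minimal sketch (illustrative names and sizes): write a sector-specific
    # emission-flux variable and attach the recommended "comment" attribute.
    import numpy as np
    from netCDF4 import Dataset

    with Dataset("xylene_emission.nc", "w") as ds:
        ds.createDimension("lat", 3)
        ds.createDimension("lon", 4)
        var = ds.createVariable("emi_xylene_waste", "f4", ("lat", "lon"))
        # A sector-specific standard name of the kind described above (assumed).
        var.standard_name = (
            "tendency_of_atmosphere_mass_content_of_xylene_"
            "due_to_emission_from_waste_treatment_and_disposal"
        )
        var.units = "kg m-2 s-1"
        # The comment lists the source categories and the categorization scheme.
        var.comment = (
            "IPCC (Intergovernmental Panel on Climate Change) source categories "
            "6A, 6B, 6C and 6D as defined in the 2006 IPCC guidelines for "
            "national greenhouse gas inventories"
        )
        var[:] = np.zeros((3, 4), dtype="f4")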
mol s-1

For the atmosphere-moles tendency entries: "tendency_of_X" means derivative of X with respect to time. The construction "atmosphere_moles_of_X" means the total number of moles of X in the entire atmosphere, i.e. summed over the atmospheric column and over the entire globe. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The entries differ in the chemical family concerned:
- "HOx" means a combination of two radical species containing hydrogen and oxygen: OH and HO2.
- "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2.
- "Noy" describes a family of chemical species. The family usually includes atomic nitrogen (N), nitrogen monoxide (NO), nitrogen dioxide (NO2), dinitrogen pentoxide (N2O5), nitric acid (HNO3), peroxynitric acid (HNO4), bromine nitrate (BrONO2), chlorine nitrate (ClONO2) and organic nitrates (most notably peroxyacetyl nitrate, sometimes referred to as PAN, (CH3COO2NO2)). The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute.

kg m-3 s-1

For the aviation emission entries: "tendency_of_X" means derivative of X with respect to time. Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The mass is the total mass of the molecules. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Emission" means emission from a primary source located anywhere within the atmosphere, including at the lower boundary (i.e. earth's surface). "Emission" is a process entirely distinct from "re-emission" which is used in some standard names. The "aviation" sector includes fuel combustion activities related to civil aviation. "Aviation" is the term used in standard names to describe a collection of emission sources. A variable which has this value for the standard_name attribute should be accompanied by a comment attribute which lists the source categories and provides a reference to the categorization scheme, for example, "IPCC (Intergovernmental Panel on Climate Change) source category 1A3a as defined in the 2006 IPCC guidelines for national greenhouse gas inventories". The entries differ in the species emitted:
- The chemical formula for nitrogen dioxide is NO2.
- The chemical formula for nitrogen monoxide is NO.
- "Nox" means a combination of two radical species containing nitrogen and oxygen: NO+NO2; the phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A, and means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A.
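The 'expressed_as' construction used by several of these entries counts only the constituent B contained in A. A short numeric sketch of "nox_expressed_as_nitrogen" is given below; the concentrations are made-up values and the molar masses are approximate, so treat it as an illustration of the bookkeeping rather than of any particular dataset.

    # Sketch (made-up concentrations, approximate molar masses): express a NOx
    # mass concentration "as nitrogen" by counting only the N in NO and NO2.
    M_N, M_NO, M_NO2 = 14.007, 30.006, 46.006   # g/mol

    mass_no = 3.0e-9     # kg m-3 of NO (illustrative)
    mass_no2 = 4.6e-9    # kg m-3 of NO2 (illustrative)

    nox_expressed_as_nitrogen = mass_no * (M_N / M_NO) + mass_no2 * (M_N / M_NO2)
    print(nox_expressed_as_nitrogen)   # ~2.8e-9 kg m-3, i.e. the nitrogen only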
In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Autoconversion is the process of collision and coalescence which results in the formation of precipitation particles from cloud water droplets or ice crystals. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Autoconversion is the process of collision and coalescence which results in the formation of precipitation particles from cloud water droplets or ice crystals. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Autoconversion is the process of collision and coalescence which results in the formation of precipitation particles from cloud water droplets or ice crystals. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. 
Autoconversion is the process of collision and coalescence which results in the formation of precipitation particles from cloud water droplets or ice crystals. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. @@ -30140,14 +30351,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) Condensation is the conversion of vapor into liquid. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) Condensation is the conversion of vapor into liquid. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. 
In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. "condensed_water" means liquid and ice. @@ -30161,21 +30372,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Accretion is the growth of a hydrometeor by collision with cloud droplets or ice crystals. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Accretion is the growth of a hydrometeor by collision with cloud droplets or ice crystals. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. 
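The constructions quoted in these entries combine in a predictable way: mass_fraction_of_X_in_Y is the ratio of the mass of X to the mass of Y (including X), "tendency_of_X" is its time derivative, and each "due_to_<process>" term is one addend in the sum that composes the un-suffixed tendency. A minimal sketch of that arithmetic follows, using invented numbers and hypothetical variable names rather than anything taken from the table or from Iris:

import numpy as np

# Hypothetical cell values: mass of condensed water and of air (kg)
# at two times, one hour apart.
mass_condensed_water = np.array([1.2e-4, 1.5e-4])
mass_air = np.array([1.0, 1.0])
dt = 3600.0  # s

# mass_fraction_of_X_in_Y: mass of X divided by mass of Y (including X).
mass_fraction = mass_condensed_water / mass_air

# tendency_of_X: derivative of X with respect to time (here a finite
# difference), giving units of s-1 for a dimensionless mass fraction.
tendency_total = (mass_fraction[1] - mass_fraction[0]) / dt

# due_to_<process>: single terms whose sum composes the total tendency.
tendency_due_to_autoconversion = -2.0e-9  # invented value, s-1
tendency_due_to_accretion = -1.5e-9       # invented value, s-1
tendency_due_to_other_processes = tendency_total - (
    tendency_due_to_autoconversion + tendency_due_to_accretion
)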
s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Aggregation is the clumping together of frozen cloud particles to produce snowflakes. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Aggregation is the clumping together of frozen cloud particles to produce snowflakes. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. @@ -30203,21 +30414,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Sublimation is the conversion of solid into vapor. Deposition is the opposite of sublimation, i.e. it is the conversion of vapor into solid. 
Deposition is distinct from the processes of dry deposition and wet deposition of atmospheric aerosol particles, which are referred to in some standard names. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Sublimation is the conversion of solid into vapor. Deposition is the opposite of sublimation, i.e. it is the conversion of vapor into solid. Deposition is distinct from the processes of dry deposition and wet deposition of atmospheric aerosol particles, which are referred to in some standard names. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Evaporation is the conversion of liquid or solid into vapor. (The conversion of solid alone into vapor is called "sublimation".) In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. @@ -30245,7 +30456,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). 
A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. @@ -30259,7 +30470,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. + Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. @@ -30273,7 +30484,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Riming is the rapid freezing of supercooled water onto a surface. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. 
+ Mass fraction is used in the construction mass_fraction_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Riming is the rapid freezing of supercooled water onto a surface. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. @@ -30700,14 +30911,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. mol m-2 s-1 - "Content" indicates a quantity per unit area. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. Aragonite is a mineral that is a polymorph of calcium carbonate. The chemical formula of aragonite is CaCO3. Standard names also exist for calcite, another polymorph of calcium carbonate. + "Content" indicates a quantity per unit area. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. Aragonite is a mineral that is a polymorph of calcium carbonate. The chemical formula of aragonite is CaCO3. Standard names also exist for calcite, another polymorph of calcium carbonate. mol m-2 s-1 - "Content" indicates a quantity per unit area. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. Calcite is a mineral that is a polymorph of calcium carbonate. The chemical formula of calcite is CaCO3. Standard names also exist for aragonite, another polymorph of calcium carbonate. + "Content" indicates a quantity per unit area. The phrase 'expressed_as' is used in the construction A_expressed_as_B, where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "tendency_of_X" means derivative of X with respect to time. Calcite is a mineral that is a polymorph of calcium carbonate. The chemical formula of calcite is CaCO3. Standard names also exist for aragonite, another polymorph of calcium carbonate. @@ -31043,21 +31254,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice dynamics" refers to the motion of sea ice. + The phrase "tendency_of_X" means derivative of X with respect to time. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice dynamics" refers to the motion of sea ice. s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Sea ice "ridging" occurs in rough sea conditions. The motion of the sea surface can cause areas of sea ice to deform and fold resulting in ridged upper and lower surfaces. The ridges can be as much as twenty metres thick if thick ice is deformed. + The phrase "tendency_of_X" means derivative of X with respect to time. 
"Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Sea ice "ridging" occurs in rough sea conditions. The motion of the sea surface can cause areas of sea ice to deform and fold resulting in ridged upper and lower surfaces. The ridges can be as much as twenty metres thick if thick ice is deformed. s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice thermodynamics" refers to the addition or subtraction of mass due to surface and basal fluxes. + The phrase "tendency_of_X" means derivative of X with respect to time. "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. Sea ice area fraction is area of the sea surface occupied by sea ice. It is also called "sea ice concentration". "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice thermodynamics" refers to the addition or subtraction of mass due to surface and basal fluxes. @@ -31134,7 +31345,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W m-2 - The phrase "tendency_of_X" means derivative of X with respect to time. 
The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the conservative temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Conservative Temperature is defined as part of the Thermodynamic Equation of Seawater 2010 (TEOS-10) which was adopted in 2010 by the International Oceanographic Commission (IOC). Conservative Temperature is specific potential enthalpy (which has the standard name sea_water_specific_potential_enthalpy) divided by a fixed value of the specific heat capacity of sea water, namely cp_0 = 3991.86795711963 J kg-1 K-1. Conservative Temperature is a more accurate measure of the "heat content" of sea water, by a factor of one hundred, than is potential temperature. Because of this, it can be regarded as being proportional to the heat content of sea water per unit mass. Reference: www.teos-10.org; McDougall, 2003 doi: 10.1175/1520-0485(2003)033<0945:PEACOV>2.0.CO;2. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "residual_mean_advection" refers to the sum of the model's resolved advective transport plus any parameterized advective transport. Parameterized advective transport includes processes such as parameterized mesoscale and submesoscale transport, as well as any other advectively parameterized transport. When the parameterized advective transport is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic, since the convergence of skew-fluxes are identical (in the continuous formulation) to the convergence of advective fluxes. + The phrase "tendency_of_X" means derivative of X with respect to time. The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the conservative temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Conservative Temperature is defined as part of the Thermodynamic Equation of Seawater 2010 (TEOS-10) which was adopted in 2010 by the International Oceanographic Commission (IOC). Conservative Temperature is specific potential enthalpy (which has the standard name sea_water_specific_potential_enthalpy) divided by a fixed value of the specific heat capacity of sea water, namely cp_0 = 3991.86795711963 J kg-1 K-1. Conservative Temperature is a more accurate measure of the "heat content" of sea water, by a factor of one hundred, than is potential temperature. Because of this, it can be regarded as being proportional to the heat content of sea water per unit mass. 
Reference: www.teos-10.org; McDougall, 2003 doi: 10.1175/1520-0485(2003)033<0945:PEACOV>2.0.CO;2. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "residual_mean_advection" refers to the sum of the model's resolved advective transport plus any parameterized advective transport. Parameterized advective transport includes processes such as parameterized mesoscale and submesoscale transport, as well as any other advectively parameterized transport. When the parameterized advective transport is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic, since the convergence of skew-fluxes are identical (in the continuous formulation) to the convergence of advective fluxes. @@ -31183,49 +31394,49 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W m-2 - The phrase "tendency_of_X" means derivative of X with respect to time. The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the potential temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Potential temperature is the temperature a parcel of air or sea water would have if moved adiabatically to sea level pressure. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "residual_mean_advection" refers to the sum of the model's resolved advective transport plus any parameterized advective transport. Parameterized advective transport includes processes such as parameterized mesoscale and submesoscale transport, as well as any other advectively parameterized transport. When the parameterized advective transport is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic, since the convergence of skew-fluxes are identical (in the continuous formulation) to the convergence of advective fluxes. + The phrase "tendency_of_X" means derivative of X with respect to time. The phrase "expressed_as_heat_content" means that this quantity is calculated as the specific heat capacity times density of sea water multiplied by the potential temperature of the sea water in the grid cell and integrated over depth. If used for a layer heat content, coordinate bounds should be used to define the extent of the layers. If no coordinate bounds are specified, it is assumed that the integral is calculated over the entire vertical extent of the medium, e.g, if the medium is sea water the integral is assumed to be calculated over the full depth of the ocean. Potential temperature is the temperature a parcel of air or sea water would have if moved adiabatically to sea level pressure. 
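The "expressed_as_heat_content" wording above reduces to a depth integral of density times the fixed specific heat capacity cp_0 times conservative (or, in the following entry, potential) temperature; the time derivative of that integral then carries the canonical unit W m-2. A sketch for an invented three-layer column (the profile values and time step are illustrative only, not drawn from the table or from Iris):

import numpy as np

CP_0 = 3991.86795711963  # J kg-1 K-1, the fixed TEOS-10 value quoted above

# Hypothetical column: layer thicknesses from coordinate bounds, with
# density and conservative temperature on the same levels.
layer_thickness = np.array([10.0, 20.0, 50.0])              # m
density = np.array([1025.0, 1026.0, 1027.5])                # kg m-3
conservative_temperature = np.array([291.0, 289.5, 287.0])  # K

# Heat content per unit area, integrated over the column (J m-2).
heat_content = np.sum(
    density * CP_0 * conservative_temperature * layer_thickness
)

# A tendency of this quantity is its time derivative, hence W m-2.
dt = 86400.0                               # s, invented interval
later_heat_content = heat_content + 5.0e6  # invented change, J m-2
tendency = (later_heat_content - heat_content) / dt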
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "residual_mean_advection" refers to the sum of the model's resolved advective transport plus any parameterized advective transport. Parameterized advective transport includes processes such as parameterized mesoscale and submesoscale transport, as well as any other advectively parameterized transport. When the parameterized advective transport is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic, since the convergence of skew-fluxes are identical (in the continuous formulation) to the convergence of advective fluxes. 1e-3 s-1 - "tendency_of_X" means derivative of X with respect to time. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "tendency_of_X" means derivative of X with respect to time. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. 
There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. 1e-3 s-1 - "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. 
The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. 1e-3 s-1 - "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Horizontal mixing" means any horizontal transport other than by advection and parameterized eddy advection, usually represented as horizontal diffusion in ocean models. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. 
Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Horizontal mixing" means any horizontal transport other than by advection and parameterized eddy advection, usually represented as horizontal diffusion in ocean models. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. 1e-3 s-1 - "tendency_of_X" means derivative of X with respect to time. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized eddy advection can be represented on various spatial scales and there are standard names for parameterized_mesoscale_eddy_advection and parameterized_submesoscale_eddy_advection which both contribute to the total parameterized eddy advection. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. Parameterized eddy advection in an ocean model means the part due to a scheme representing parameterized eddy-induced advective effects not included in the resolved model velocity field. Parameterized eddy advection can be represented on various spatial scales and there are standard names for parameterized_mesoscale_eddy_advection and parameterized_submesoscale_eddy_advection which both contribute to the total parameterized eddy advection. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. 
There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. 1e-3 s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice thermodynamics" refers to the addition or subtraction of sea ice mass due to surface and basal fluxes, i.e. due to melting, sublimation and fusion. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. + The phrase "tendency_of_X" means derivative of X with respect to time. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Sea ice thermodynamics" refers to the addition or subtraction of sea ice mass due to surface and basal fluxes, i.e. due to melting, sublimation and fusion. "Sea ice" means all ice floating in the sea which has formed from freezing sea water, rather than by other processes such as calving of land ice to form icebergs. 1e-3 s-1 - "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Vertical mixing" means any vertical transport other than by advection and parameterized eddy advection, represented by a combination of vertical diffusion, turbulent mixing and convection in ocean models. 
Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity. + "tendency_of_X" means derivative of X with respect to time. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. "Vertical mixing" means any vertical transport other than by advection and parameterized eddy advection, represented by a combination of vertical diffusion, turbulent mixing and convection in ocean models. Sea water salinity is the salt content of sea water, often on the Practical Salinity Scale of 1978. However, the unqualified term 'salinity' is generic and does not necessarily imply any particular method of calculation. The units of salinity are dimensionless and normally given as 1e-3 or 0.001 i.e. parts per thousand. There are standard names for the more precisely defined salinity quantities: sea_water_knudsen_salinity, S_K (used for salinity observations between 1901 and 1966), sea_water_cox_salinity, S_C (used for salinity observations between 1967 and 1977), sea_water_practical_salinity, S_P (used for salinity observations from 1978 to the present day), sea_water_absolute_salinity, S_A, sea_water_preformed_salinity, S_*, and sea_water_reference_salinity. Practical Salinity is reported on the Practical Salinity Scale of 1978 (PSS-78), and is usually based on the electrical conductivity of sea water in observations since the 1960s. 
Conversion of data between the observed scales follows: S_P = (S_K - 0.03) * (1.80655 / 1.805) and S_P = S_C, however the accuracy of the latter is dependent on whether chlorinity or conductivity was used to determine the S_C value, with this inconsistency driving the development of PSS-78. The more precise standard names should be used where appropriate for both modelled and observed salinities. In particular, the use of sea_water_salinity to describe salinity observations made from 1978 onwards is now deprecated in favor of the term sea_water_practical_salinity which is the salinity quantity stored by national data centers for post-1978 observations. The only exception to this is where the observed salinities are definitely known not to be recorded on the Practical Salinity Scale. The unit "parts per thousand" was used for sea_water_knudsen_salinity and sea_water_cox_salinity.
@@ -31274,7 +31485,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
kg m-2 s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "residual_mean_advection" refers to the sum of the model's resolved advective transport plus any parameterized advective transport. Parameterized advective transport includes processes such as parameterized mesoscale and submesoscale transport, as well as any other advectively parameterized transport. When the parameterized advective transport is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic, since the convergence of skew-fluxes are identical (in the continuous formulation) to the convergence of advective fluxes.
+ The phrase "tendency_of_X" means derivative of X with respect to time. "Content" indicates a quantity per unit area. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. The phrase "residual_mean_advection" refers to the sum of the model's resolved advective transport plus any parameterized advective transport. Parameterized advective transport includes processes such as parameterized mesoscale and submesoscale transport, as well as any other advectively parameterized transport. When the parameterized advective transport is represented in the model as a skew-diffusion rather than an advection, then the parameterized skew diffusion should be included in this diagnostic, since the convergence of skew-fluxes is identical (in the continuous formulation) to the convergence of advective fluxes.
@@ -31372,21 +31583,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
s-1
- The phrase "tendency_of_X" means derivative of X with respect to time. "Specific" means per unit mass. Specific humidity is the mass fraction of water vapor in (moist) air. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes).
"Precipitation" in the earth's atmosphere means precipitation of water in all phases. A variable with the standard name of tendency_of_specific_humidity_due_to_stratiform_cloud_and_precipitation should contain the effects of all processes which convert stratiform clouds and precipitation to or from water vapor. + The phrase "tendency_of_X" means derivative of X with respect to time. "Specific" means per unit mass. Specific humidity is the mass fraction of water vapor in (moist) air. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. A variable with the standard name of tendency_of_specific_humidity_due_to_stratiform_cloud_and_precipitation should contain the effects of all processes which convert stratiform clouds and precipitation to or from water vapor. s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. Specific humidity is the mass fraction of water vapor in (moist) air. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). + The phrase "tendency_of_X" means derivative of X with respect to time. Specific humidity is the mass fraction of water vapor in (moist) air. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. "Boundary layer mixing" means turbulent motions that transport heat, water, momentum and chemical constituents within the atmospheric boundary layer and affect exchanges between the surface and the atmosphere. The atmospheric boundary layer is typically characterised by a well-mixed sub-cloud layer of order 500 metres, and by a more extended conditionally unstable layer with boundary-layer clouds up to 2 km. (Reference: IPCC Third Assessment Report, Working Group 1: The Scientific Basis, 7.2.2.3, https://archive.ipcc.ch/ipccreports/tar/wg1/273.htm). s-1 - The phrase "tendency_of_X" means derivative of X with respect to time. Specific humidity is the mass fraction of water vapor in (moist) air. 
The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. + The phrase "tendency_of_X" means derivative of X with respect to time. Specific humidity is the mass fraction of water vapor in (moist) air. The specification of a physical process by the phrase due_to_process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. In an atmosphere model, stratiform cloud is that produced by large-scale convergence (not the convection schemes). "Precipitation" in the earth's atmosphere means precipitation of water in all phases. @@ -31736,14 +31947,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. K - toa_brightness_temperature_bias_at_standard_scene_due_to_intercalibration is the difference between top-of-atmosphere (TOA) brightness temperature of the reference sensor and TOA brightness temperature of the monitored sensor. This TOA brightness temperature difference is a measure of the calibration difference between the monitored and reference sensors. The standard scene is a target area with typical Earth surface and atmospheric conditions that is accepted as a reference. Brightness temperature of a body is the temperature of a black body which radiates the same power per unit solid angle per unit area at a given wavenumber. TOA brightness temperature of the standard scene is calculated using a radiative transfer simulation for a given viewing geometry. The resultant top-of-atmosphere spectral radiance is then integrated with each sensor's spectral response function and converted to equivalent brightness temperature. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units). + toa_brightness_temperature_bias_at_standard_scene_due_to_intercalibration is the difference between top-of-atmosphere (TOA) brightness temperature of the reference sensor and TOA brightness temperature of the monitored sensor. This TOA brightness temperature difference is a measure of the calibration difference between the monitored and reference sensors. The standard scene is a target area with typical Earth surface and atmospheric conditions that is accepted as a reference. Brightness temperature of a body is the temperature of a black body which radiates the same power per unit solid angle per unit area at a given wavenumber. TOA brightness temperature of the standard scene is calculated using a radiative transfer simulation for a given viewing geometry. 
The resultant top-of-atmosphere spectral radiance is then integrated with each sensor's spectral response function and converted to equivalent brightness temperature. The specification of a physical process by the phrase "due_to_" process means that the quantity named is a single term in a sum of terms which together compose the general quantity named by omitting the phrase. It is strongly recommended that a variable with this standard name should have the attribute units_metadata="temperature: difference", meaning that it refers to temperature differences and implying that the origin of the temperature scale is irrelevant, because it is essential to know whether a temperature is on-scale or a difference in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units).
K
- "toa" means top of atmosphere. The brightness temperature of a body is the temperature of a black body which radiates the same power per unit solid angle per unit area at a given wavenumber. The standard scene is a target area with typical Earth surface and atmospheric conditions that is accepted as a reference. The toa radiance of the standard scene is calculated using a radiative transfer model for a given viewing geometry. The resultant toa spectral radiance is then integrated with a sensor's spectral response function and converted to equivalent brightness temperature. It is strongly recommended that a variable with this standard name should have a units_metadata attribute, with one of the values "on-scale" or "difference", whichever is appropriate for the data, because it is essential to know whether the temperature is on-scale (meaning relative to the origin of the scale indicated by the units) or refers to temperature differences (implying that the origin of the temperature scale is irrevelant), in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units).
+ "toa" means top of atmosphere. The brightness temperature of a body is the temperature of a black body which radiates the same power per unit solid angle per unit area at a given wavenumber. The standard scene is a target area with typical Earth surface and atmospheric conditions that is accepted as a reference. The toa radiance of the standard scene is calculated using a radiative transfer model for a given viewing geometry. The resultant toa spectral radiance is then integrated with a sensor's spectral response function and converted to equivalent brightness temperature. It is strongly recommended that a variable with this standard name should have a units_metadata attribute, with one of the values "on-scale" or "difference", whichever is appropriate for the data, because it is essential to know whether the temperature is on-scale (meaning relative to the origin of the scale indicated by the units) or refers to temperature differences (implying that the origin of the temperature scale is irrelevant), in order to convert the units correctly (cf. https://cfconventions.org/cf-conventions/cf-conventions.html#temperature-units).
@@ -31792,7 +32003,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
W m-2
- "toa" means top of atmosphere. The term "longwave" means longwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X.
"Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. + "toa" means top of atmosphere. The term "longwave" means longwave radiation. "X_direct_radiative_effect" refers to the instantaneous radiative impact of X on the Earth's energy balance, excluding secondary effects such as changes in cloud cover which may be caused by X. "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. "Ambient_aerosol" means that the aerosol is measured or modelled at the ambient state of pressure, temperature and relative humidity that exists in its immediate environment. "Ambient aerosol particles" are aerosol particles that have taken up ambient water through hygroscopic growth. The extent of hygroscopic growth depends on the relative humidity and the composition of the particles. To specify the relative humidity and temperature at which the quantity described by the standard name applies, provide scalar coordinate variables with standard names of "relative_humidity" and "air_temperature". A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. @@ -31869,14 +32080,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "longwave" means longwave radiation. "toa" means top of atmosphere. The TOA outgoing longwave flux is the upwelling thermal radiative flux, often called the "outgoing longwave radiation" or "OLR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. 
+ A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "longwave" means longwave radiation. "toa" means top of atmosphere. The TOA outgoing longwave flux is the upwelling thermal radiative flux, often called the "outgoing longwave radiation" or "OLR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated.
W/m2
- A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "longwave" means longwave radiation. "toa" means top of atmosphere. The TOA outgoing longwave flux is the upwelling thermal radiative flux, often called the "outgoing longwave radiation" or "OLR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated.
+ A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "longwave" means longwave radiation. "toa" means top of atmosphere. The TOA outgoing longwave flux is the upwelling thermal radiative flux, often called the "outgoing longwave radiation" or "OLR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated.
@@ -31911,28 +32122,28 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/.
W m-2 sr-1 (m-1)-1 - toa_outgoing_radiance_per_unit_wavenumber_mean_within_collocation_scene is an average of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation scene. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation scene" is a grouping of a sensor's adjacent field-of-views centered on a collocation target. The size of the collocation scene is typically about twice that of the collocation target. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. + toa_outgoing_radiance_per_unit_wavenumber_mean_within_collocation_scene is an average of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation scene. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation scene" is a grouping of a sensor's adjacent field-of-views centered on a collocation target. The size of the collocation scene is typically about twice that of the collocation target. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. W m-2 sr-1 (m-1)-1 - toa_outgoing_radiance_per_unit_wavenumber_mean_within_collocation_target is an average of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation target. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. + toa_outgoing_radiance_per_unit_wavenumber_mean_within_collocation_target is an average of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation target. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. 
In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. W m-2 sr-1 (m-1)-1 - toa_outgoing_radiance_per_unit_wavenumber_stdev_within_collocation_scene is the standard deviation of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation scene. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation scene" is a grouping of a sensor's adjacent field-of-views centered on a collocation target. The size of the collocation scene is typically about twice that of the collocation target. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. + toa_outgoing_radiance_per_unit_wavenumber_stdev_within_collocation_scene is the standard deviation of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation scene. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation scene" is a grouping of a sensor's adjacent field-of-views centered on a collocation target. The size of the collocation scene is typically about twice that of the collocation target. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. W m-2 sr-1 (m-1)-1 - toa_outgoing_radiance_per_unit_wavenumber_stdev_within_collocation_target is the standard deviation of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation target. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. 
+ toa_outgoing_radiance_per_unit_wavenumber_stdev_within_collocation_target is the standard deviation of observations of the quantity with standard name toa_outgoing_radiance_per_unit_wavenumber from a sensor's adjacent field-of-views within a collocation target. "toa" means top of atmosphere. The TOA outgoing radiance is the upwelling radiance, i.e., toward outer space. Radiance is the radiative flux in a particular direction, per unit of solid angle. In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. The "collocation target" is an area on the Earth's surface at which observations from at least two sensors are collected. Its size is defined by the sensor with the largest field-of-view footprint. Two events are deemed to be collocated based on some set of spatial, temporal, and viewing geometry criteria. @@ -31960,7 +32171,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - The abbreviation "toa" means top of atmosphere. The term "shortwave" means shortwave radiation. The TOA outgoing shortwave flux is the reflected and scattered solar radiative flux i.e. the "upwelling" TOA shortwave flux, sometimes called the "outgoing shortwave radiation" or "OSR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The abbreviation "toa" means top of atmosphere. The term "shortwave" means shortwave radiation. The TOA outgoing shortwave flux is the reflected and scattered solar radiative flux i.e. the "upwelling" TOA shortwave flux, sometimes called the "outgoing shortwave radiation" or "OSR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. 
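The units_metadata recommendation quoted in the brightness-temperature entries above is an ordinary netCDF attribute, so it can be attached when the variable is written; the sketch below uses netCDF4-python with an invented file name, variable name and value, and is only one plausible way of recording it.

```python
import numpy as np
import netCDF4

# Hypothetical file and variable names; the standard_name and units are the
# ones given in the entry above.
with netCDF4.Dataset("tb_bias_example.nc", "w") as ds:
    ds.createDimension("time", 1)
    var = ds.createVariable("tb_bias", "f4", ("time",))
    var.standard_name = (
        "toa_brightness_temperature_bias_at_standard_scene_due_to_intercalibration"
    )
    var.units = "K"
    # The quantity is a temperature difference, so flag it as such.
    var.units_metadata = "temperature: difference"
    var[:] = np.array([0.15], dtype="f4")  # invented bias value
```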
@@ -31974,7 +32185,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - The abbreviation "toa" means top of atmosphere. The term "shortwave" means shortwave radiation. The TOA outgoing shortwave flux is the reflected and scattered solar radiative flux i.e. the "upwelling" TOA shortwave flux, sometimes called the "outgoing shortwave radiation" or "OSR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The abbreviation "toa" means top of atmosphere. The term "shortwave" means shortwave radiation. The TOA outgoing shortwave flux is the reflected and scattered solar radiative flux i.e. the "upwelling" TOA shortwave flux, sometimes called the "outgoing shortwave radiation" or "OSR". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -32191,21 +32402,21 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. 
To specify the amount of cloud cover at which the data variable applies, provide a scalar coordinate variable with standard name "cloud_area_fraction". Standard names are also defined for the quantities ultraviolet_index_assuming_clear_sky and ultraviolet_index_assuming_overcast_sky. + The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. To specify the amount of cloud cover at which the data variable applies, provide a scalar coordinate variable with standard name "cloud_area_fraction". Standard names are also defined for the quantities ultraviolet_index_assuming_clear_sky and ultraviolet_index_assuming_overcast_sky. 1 - The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. Standard names are also defined for the quantities ultraviolet_index and ultraviolet_index_assuming_overcast_sky. + The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. 
A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. Standard names are also defined for the quantities ultraviolet_index and ultraviolet_index_assuming_overcast_sky. 1 - The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Overcast" means a fractional sky cover of 95% or more when at least a portion of this amount is attributable to clouds or obscuring phenomena (such as haze, dust, smoke, fog, etc.) aloft. (Reference: AMS Glossary: http://glossary.ametsoc.org/wiki/Main_Page). Standard names are also defined for the quantities ultraviolet_index and ultraviolet_index_assuming_clear_sky. + The "Ultraviolet Index" (UVI) is a measure of the amount of solar ultraviolet radiation that reaches the surface of the earth depending on factors such as time of day and cloud cover. It is often used to alert the public of the need to limit sun exposure and use sun creams to protect the skin. Each point on the Index scale is equivalent to 25 mW m-2 of UV radiation (reference: Australian Bureau of Meteorology, http://www.bom.gov.au/uv/about_uv_index.shtml). The UVI range is expressed as a numeric value from 0 to 20 and sometimes graphically as bands of color indicating the attendant risk of skin damage. A UVI of 0-2 is described as 'Low' (represented graphically in green); a UVI of 11 or greater is described as "Extreme" (represented graphically in purple). The higher the UVI, the greater the potential health risk to humans and the less time it takes for harm to occur. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Overcast" means a fractional sky cover of 95% or more when at least a portion of this amount is attributable to clouds or obscuring phenomena (such as haze, dust, smoke, fog, etc.) aloft. (Reference: AMS Glossary: http://glossary.ametsoc.org/wiki/Main_Page). Standard names are also defined for the quantities ultraviolet_index and ultraviolet_index_assuming_clear_sky. @@ -32285,18 +32496,18 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 
"Eliassen Palm flux" is a widely used vector in the meridional plane, and the divergence of this flux appears as a forcing in the Transformed Eulerian mean formulation of the zonal mean zonal wind equation. "Upward" indicates a vector component which is positive when directed upward (negative downward). - + W m-2 - "Land ice" means glaciers, ice-caps and ice-sheets resting on bedrock and also includes ice-shelves. "ground_level" means the land surface (including beneath snow, ice and surface water, if any). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Upward" indicates a vector component which is positive when directed upward (negative downward). The quantity with standard name upward_geothermal_heat_flux_at_ground_level_in_land_ice is the upward heat flux at the interface between the ice and bedrock. It does not include any heat flux from the ocean into an ice shelf. + "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. - + W m-2 - "Upward" indicates a vector component which is positive when directed upward (negative downward). In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. + In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. "Upward" indicates a vector component which is positive when directed upward (negative downward). "Grounded ice sheet" indicates where the ice sheet rests over bedrock and is thus grounded. It excludes ice-caps, glaciers and floating ice shelves. The quantity with standard name upward_heat_flux_at_base_of_grounded_ice_sheet is the upward heat flux at the interface between the ice and bedrock. It does not include any heat flux from the ocean into an ice shelf. @@ -32450,14 +32661,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - The term "longwave" means longwave radiation. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. 
+ The term "longwave" means longwave radiation. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - The term "longwave" means longwave radiation. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + The term "longwave" means longwave radiation. Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. 
A phrase assuming_condition indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -32513,14 +32724,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. W/m2 - Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. "Clear sky" means in the absence of clouds. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. 
By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. W/m2 - Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. + Upwelling radiation is radiation from below. It does not mean "net upward". The sign convention is that "upwelling" is positive upwards and "downwelling" is positive downwards. The term "shortwave" means shortwave radiation. When thought of as being incident on a surface, a radiative flux is sometimes called "irradiance". In addition, it is identical with the quantity measured by a cosine-collector light-meter and sometimes called "vector irradiance". In accordance with common usage in geophysical disciplines, "flux" implies per unit area, called "flux density" in physics. A phrase "assuming_condition" indicates that the named quantity is the value which would obtain if all aspects of the system were unaltered except for the assumption of the circumstances specified by the condition. This 3D ozone field acts as a reference ozone field in a diagnostic call to the model's radiation scheme. It is expressed in terms of mole fraction of ozone in air. It may be observation-based or model-derived. It may be from any time period. By using the same ozone reference in the diagnostic radiation call in two model simulations and calculating differences between the radiative flux diagnostics from the prognostic call to the radiation scheme and the diagnostic call to the radiation scheme with the ozone reference, an instantaneous radiative forcing for ozone can be calculated. @@ -32534,7 +32745,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 87 - "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Vegetation" means any plants e.g. trees, shrubs, grass. 
The term "plants" refers to the kingdom of plants in the modern classification which excludes fungi. Plants are autotrophs i.e. "producers" of biomass using carbon obtained from carbon dioxide. + "Area fraction" is the fraction of a grid cell's horizontal area that has some characteristic of interest. It is evaluated as the area of interest divided by the grid cell area, or if the cell_methods restricts the evaluation to some portion of that grid cell (e.g. "where sea_ice"), then it is the area of interest divided by the area of the identified portion. It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. "Vegetation" means any plants e.g. trees, shrubs, grass. The term "plants" refers to the kingdom of plants in the modern classification which excludes fungi. Plants are autotrophs i.e. "producers" of biomass using carbon obtained from carbon dioxide. @@ -33255,7 +33466,7 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 1 - "Volume fraction" is used in the construction volume_fraction_of_X_in_Y, where X is a material constituent of Y. It is evaluated as the volume of X divided by the volume of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The volume_fraction_of_water_in_soil_at_saturation is the volume fraction at which a soil has reached it's maximum water holding capacity. + "Volume fraction" is used in the construction volume_fraction_of_X_in_Y, where X is a material constituent of Y. It is evaluated as the volume of X divided by the volume of Y (including X). It may be expressed as a fraction, a percentage, or any other dimensionless representation of a fraction. The volume_fraction_of_water_in_soil_at_saturation is the volume fraction at which a soil has reached it's maximum water holding capacity. @@ -33675,14 +33886,14 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. m - 'Water surface height above reference datum' means the height of the upper surface of a body of liquid water, such as sea, lake or river, above an arbitrary reference datum. The altitude of the datum should be provided in a variable with standard name water_surface_reference_datum_altitude. The surface called "surface" means the lower boundary of the atmosphere. + 'Water surface height above reference datum' means the height of the upper surface of a body of liquid water, such as sea, lake or river, above an arbitrary reference datum. The altitude of the datum should be provided in a variable with standard name water_surface_reference_datum_altitude. The surface called "surface" means the lower boundary of the atmosphere. m - Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. 'Water surface reference datum altitude' means the altitude of the arbitrary datum referred to by a quantity with standard name 'water_surface_height_above_reference_datum'. The surface called "surface" means the lower boundary of the atmosphere. + Altitude is the (geometric) height above the geoid, which is the reference geopotential surface. The geoid is similar to mean sea level. 'Water surface reference datum altitude' means the altitude of the arbitrary datum referred to by a quantity with standard name 'water_surface_height_above_reference_datum'. The surface called "surface" means the lower boundary of the atmosphere. 
@@ -33903,28 +34114,16 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. - - isotropic_longwave_radiance_in_air - - - - isotropic_shortwave_radiance_in_air - - - - mole_fraction_of_ozone_in_air - - - - product_of_northward_wind_and_specific_humidity + + mass_concentration_of_chlorophyll_in_sea_water - - radiation_wavelength + + mass_concentration_of_chlorophyll_in_sea_water - - specific_gravitational_potential_energy + + leaf_mass_content_of_carbon @@ -33935,140 +34134,144 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. surface_drag_coefficient_for_momentum_in_air - - surface_drag_coefficient_in_air + + tendency_of_atmosphere_moles_of_methyl_chloride - - sea_surface_swell_wave_period + + atmosphere_moles_of_carbon_monoxide - - sea_surface_wind_wave_period + + tendency_of_atmosphere_moles_of_molecular_hydrogen - - mass_fraction_of_convective_cloud_condensed_water_in_air + + land_ice_lwe_surface_specific_mass_balance_rate - - mass_fraction_of_ozone_in_air + + land_ice_surface_specific_mass_balance_rate - - wave_frequency + + atmosphere_moles_of_methyl_chloride - - northward_eliassen_palm_flux_in_air + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots - - northward_heat_flux_in_air_due_to_eddy_advection + + atmosphere_moles_of_molecular_hydrogen - - upward_eliassen_palm_flux_in_air + + atmosphere_moles_of_nitrous_oxide - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves + + isotropic_radiance_per_unit_wavelength_in_air - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + isotropic_radiance_per_unit_wavelength_in_air - - upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + x_wind - - water_flux_into_sea_water + + mass_concentration_of_suspended_matter_in_sea_water - - wind_mixing_energy_flux_into_sea_water + + kinetic_energy_dissipation_in_atmosphere_boundary_layer - - surface_net_downward_radiative_flux + + y_wind - - surface_upward_sensible_heat_flux + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water - - atmosphere_moles_of_carbon_monoxide + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water - - atmosphere_moles_of_methane + + mass_content_of_cloud_condensed_water_in_atmosphere_layer - - atmosphere_moles_of_methyl_bromide + + tendency_of_atmosphere_mass_content_of_water_due_to_advection - - atmosphere_moles_of_methyl_chloride + + tendency_of_troposphere_moles_of_methane - - atmosphere_moles_of_molecular_hydrogen + + radiation_wavelength - - atmosphere_moles_of_nitrous_oxide + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection - - mass_concentration_of_suspended_matter_in_sea_water + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water + + tendency_of_troposphere_moles_of_methyl_bromide - - mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + mass_content_of_water_in_atmosphere_layer - - tendency_of_atmosphere_moles_of_methyl_bromide + + mass_content_of_water_vapor_in_atmosphere_layer - - 
tendency_of_atmosphere_moles_of_methyl_chloride + + tendency_of_troposphere_moles_of_methyl_chloride - - tendency_of_atmosphere_moles_of_molecular_hydrogen + + tendency_of_troposphere_moles_of_molecular_hydrogen - - tendency_of_atmosphere_moles_of_nitrous_oxide + + isotropic_longwave_radiance_in_air - - tendency_of_middle_atmosphere_moles_of_carbon_monoxide + + isotropic_shortwave_radiance_in_air - - tendency_of_middle_atmosphere_moles_of_methane + + sea_water_x_velocity - - tendency_of_middle_atmosphere_moles_of_methyl_bromide + + sea_water_y_velocity + + + + tendency_of_middle_atmosphere_moles_of_methane @@ -34079,48 +34282,80 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - tendency_of_troposphere_moles_of_carbon_monoxide + + omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_troposphere_moles_of_methane + + sea_surface_wind_wave_period - - tendency_of_troposphere_moles_of_methyl_bromide + + sea_water_volume - - tendency_of_troposphere_moles_of_methyl_chloride + + thickness_of_stratiform_rainfall_amount - - tendency_of_troposphere_moles_of_molecular_hydrogen + + stratiform_snowfall_flux - - atmosphere_net_upward_convective_mass_flux + + mole_fraction_of_ozone_in_air - - eastward_water_vapor_flux_in_air + + product_of_northward_wind_and_specific_humidity - - kinetic_energy_dissipation_in_atmosphere_boundary_layer + + specific_gravitational_potential_energy - - lwe_stratiform_snowfall_rate + + surface_drag_coefficient_in_air - - lwe_thickness_of_stratiform_snowfall_amount + + sea_surface_swell_wave_period - - northward_water_vapor_flux_in_air + + mass_fraction_of_convective_cloud_condensed_water_in_air + + + + mass_fraction_of_ozone_in_air + + + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + + + upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + + + upward_eliassen_palm_flux_in_air + + + + water_flux_into_sea_water + + + + wind_mixing_energy_flux_into_sea_water + + + + surface_net_downward_radiative_flux + + + + surface_upward_sensible_heat_flux @@ -34139,1769 +34374,1748 @@ http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. 
stratiform_snowfall_amount - - stratiform_snowfall_flux - - - - thickness_of_stratiform_rainfall_amount + + lwe_stratiform_snowfall_rate - - thickness_of_stratiform_snowfall_amount + + northward_water_vapor_flux_in_air atmosphere_mass_content_of_cloud_condensed_water - - atmosphere_mass_content_of_cloud_ice - - - - atmosphere_mass_content_of_convective_cloud_condensed_water - - - - atmosphere_mass_content_of_water_vapor + + surface_downward_mole_flux_of_carbon_dioxide - surface_downward_mole_flux_of_carbon_dioxide surface_upward_mole_flux_of_carbon_dioxide - - atmosphere_mass_content_of_sulfate + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence - - atmosphere_mass_content_of_sulfate + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection + + atmosphere_mass_content_of_dust_dry_aerosol_particles - - lwe_thickness_of_atmosphere_mass_content_of_water_vapor + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - mass_content_of_cloud_condensed_water_in_atmosphere_layer + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - mass_content_of_cloud_ice_in_atmosphere_layer + + mass_concentration_of_ammonium_dry_aerosol_particles_in_air - - mass_content_of_water_in_atmosphere_layer + + mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air - - mass_content_of_water_vapor_in_atmosphere_layer + + mass_concentration_of_nitrate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_due_to_advection + + mass_concentration_of_water_in_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_vapor + + mass_fraction_of_dust_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_convection + + mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_deep_convection + + number_concentration_of_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection + + number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection + + sea_surface_wave_significant_height - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection + + atmosphere_mass_content_of_ammonium_dry_aerosol_particles - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence + + mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - equivalent_thickness_at_stp_of_atmosphere_ozone_content + + number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - sea_water_x_velocity - - - - sea_water_y_velocity - - - - x_wind + + 
mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air - - y_wind + + mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - land_ice_surface_specific_mass_balance_rate + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - land_ice_lwe_surface_specific_mass_balance_rate + + mass_concentration_of_dust_dry_aerosol_particles_in_air - - isotropic_radiance_per_unit_wavelength_in_air + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - isotropic_radiance_per_unit_wavelength_in_air + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - mass_concentration_of_chlorophyll_in_sea_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - mass_concentration_of_chlorophyll_in_sea_water + + mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_convective_available_potential_energy + + mass_fraction_of_water_in_ambient_aerosol_particles_in_air - - atmosphere_convective_available_potential_energy + + mass_concentration_of_sulfate_dry_aerosol_particles_in_air - - gross_primary_productivity_of_biomass_expressed_as_carbon + + mass_fraction_of_nitrate_dry_aerosol_particles_in_air - - net_primary_productivity_of_biomass_expressed_as_carbon + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood + + atmosphere_mass_content_of_water_in_ambient_aerosol_particles - - atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles + + mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles + + atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - mass_fraction_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_gravitational_settling + + sea_surface_swell_wave_significant_height - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_turbulent_deposition + + sea_surface_wind_wave_significant_height - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition + + mass_content_of_water_in_soil - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + mass_content_of_water_in_soil_layer - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition + + sea_surface_swell_wave_to_direction - - atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles + + sea_surface_wind_wave_to_direction - - angstrom_exponent_of_ambient_aerosol_in_air + + sea_surface_swell_wave_mean_period - - atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles + + sea_surface_wave_mean_period - - atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + sea_surface_wind_wave_mean_period - - atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles + + ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - atmosphere_mass_content_of_ammonium_dry_aerosol_particles + + sea_floor_depth_below_mean_sea_level - - atmosphere_mass_content_of_dust_dry_aerosol_particles + + air_pressure_at_mean_sea_level - - atmosphere_mass_content_of_mercury_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - atmosphere_mass_content_of_nitrate_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - atmosphere_mass_content_of_nitric_acid_trihydrate_ambient_aerosol_particles + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - atmosphere_mass_content_of_water_in_ambient_aerosol_particles + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - 
atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - mass_concentration_of_dust_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + + atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles - - mass_concentration_of_ammonium_dry_aerosol_particles_in_air + + mass_concentration_of_pm1_ambient_aerosol_particles_in_air - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - mass_concentration_of_mercury_dry_aerosol_particles_in_air + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - mass_concentration_of_nitrate_dry_aerosol_particles_in_air + + atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles - - mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air - - mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + surface_geostrophic_eastward_sea_water_velocity - - mass_concentration_of_sulfate_dry_aerosol_particles_in_air + + atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles - - mass_concentration_of_water_in_ambient_aerosol_particles_in_air + + mass_concentration_of_pm10_ambient_aerosol_particles_in_air - - mass_fraction_of_ammonium_dry_aerosol_particles_in_air + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - mass_fraction_of_dust_dry_aerosol_particles_in_air + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - mass_fraction_of_nitrate_dry_aerosol_particles_in_air + + tendency_of_sea_surface_height_above_mean_sea_level - - mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission - - mass_fraction_of_sulfate_dry_aerosol_particles_in_air + + surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid - - mass_fraction_of_water_in_ambient_aerosol_particles_in_air + + surface_geostrophic_northward_sea_water_velocity - - 
mole_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition - - mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition - - number_concentration_of_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission - - number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + sea_floor_depth_below_geoid - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition + + sea_surface_height_above_mean_sea_level - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition + + sea_surface_height_above_mean_sea_level - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition + + surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition + + surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid - - tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition + + sea_surface_height_above_geoid - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + sea_surface_height_above_geoid - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production + + lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production + + lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition + + mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles - - 
tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition + + mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport + + tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation - - 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning + + atmosphere_convective_available_potential_energy - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution + + atmosphere_convective_available_potential_energy - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport + + angstrom_exponent_of_ambient_aerosol_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission + + atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission + + atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires + + atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition - - sea_surface_swell_wave_significant_height + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition - - sea_surface_wind_wave_significant_height + + integral_wrt_time_of_surface_downward_latent_heat_flux - - sea_surface_wave_significant_height + + integral_wrt_time_of_surface_downward_sensible_heat_flux - - mass_content_of_water_in_soil_layer + + integral_wrt_time_of_surface_net_downward_longwave_flux - - mass_content_of_water_in_soil + + northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection - - sea_surface_swell_wave_to_direction + + integral_wrt_time_of_toa_net_downward_shortwave_flux - - sea_surface_wind_wave_to_direction + + integral_wrt_time_of_toa_outgoing_longwave_flux - - sea_surface_wave_mean_period + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection - - sea_surface_wind_wave_mean_period + + ocean_heat_x_transport_due_to_parameterized_eddy_advection - - sea_surface_swell_wave_mean_period + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection - - ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + ocean_heat_y_transport_due_to_parameterized_eddy_advection - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - 
mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + sea_water_y_velocity_due_to_parameterized_mesoscale_eddies - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + integral_wrt_time_of_surface_net_downward_shortwave_flux - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission + + upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + northward_ocean_heat_transport_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + sea_water_x_velocity_due_to_parameterized_mesoscale_eddies - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + lwe_stratiform_precipitation_rate - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + litter_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + stratiform_precipitation_flux - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_moles_of_carbon_tetrachloride - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + mole_concentration_of_dissolved_inorganic_14C_in_sea_water - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + stem_mass_content_of_carbon - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + subsurface_litter_mass_content_of_carbon - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + mass_flux_of_carbon_into_litter_from_vegetation - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + eastward_transformed_eulerian_mean_air_velocity - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + northward_transformed_eulerian_mean_air_velocity - - mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + surface_litter_mass_content_of_carbon - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + 
+ carbon_mass_content_of_forestry_and_agricultural_products - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + carbon_mass_content_of_forestry_and_agricultural_products - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + miscellaneous_living_matter_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + root_mass_content_of_carbon - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + slow_soil_pool_mass_content_of_carbon - - sea_floor_depth_below_mean_sea_level + + soil_mass_content_of_carbon - - sea_surface_height_above_mean_sea_level + + fast_soil_pool_mass_content_of_carbon - - sea_surface_height_above_mean_sea_level + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mole_concentration_of_dissolved_inorganic_13C_in_sea_water - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + medium_soil_pool_mass_content_of_carbon - - surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid + + integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid + + integral_wrt_depth_of_sea_water_practical_salinity - - tendency_of_sea_surface_height_above_mean_sea_level + + integral_wrt_height_of_product_of_northward_wind_and_specific_humidity - - surface_geostrophic_northward_sea_water_velocity + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles - - surface_geostrophic_eastward_sea_water_velocity + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition + + water_flux_into_sea_water_from_rivers - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition + + wood_debris_mass_content_of_carbon - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + stratiform_graupel_flux - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + surface_water_evaporation_flux - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + water_volume_transport_into_sea_water_from_rivers - - sea_surface_height_above_geoid + + surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - sea_surface_height_above_geoid + + surface_upwelling_radiance_per_unit_wavelength_in_air - - sea_floor_depth_below_geoid + + surface_upwelling_radiative_flux_per_unit_wavelength_in_air - - air_pressure_at_mean_sea_level + + surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water - - lagrangian_tendency_of_air_pressure + + surface_upwelling_longwave_flux_in_air - - 
lagrangian_tendency_of_air_pressure + + incoming_water_volume_transport_along_river_channel - - mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + surface_upwelling_radiance_per_unit_wavelength_in_sea_water - - atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + surface_upwelling_shortwave_flux_in_air - - mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + upwelling_radiance_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + upwelling_radiative_flux_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + surface_upwelling_longwave_flux_in_air_assuming_clear_sky - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + downwelling_radiance_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + downwelling_radiative_flux_per_unit_wavelength_in_air - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + surface_downwelling_longwave_flux_in_air - - tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + integral_wrt_time_of_surface_downwelling_longwave_flux_in_air - - integral_wrt_time_of_surface_downward_latent_heat_flux + + integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water - - integral_wrt_time_of_surface_net_downward_longwave_flux + + surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + 
surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + surface_downwelling_radiative_flux_per_unit_wavelength_in_air - - integral_wrt_time_of_toa_outgoing_longwave_flux + + surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + magnitude_of_sea_ice_displacement - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + lwe_thickness_of_stratiform_precipitation_amount - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + outgoing_water_volume_transport_along_river_channel - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + sea_ice_temperature_expressed_as_heat_content - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + sea_ice_temperature_expressed_as_heat_content - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + sea_water_potential_temperature_expressed_as_heat_content - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + sea_water_potential_temperature_expressed_as_heat_content - - ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice - - tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + surface_downwelling_shortwave_flux_in_air - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - sea_water_x_velocity_due_to_parameterized_mesoscale_eddies + + surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + downwelling_radiance_per_unit_wavelength_in_sea_water - - upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + downwelling_photon_flux_per_unit_wavelength_in_sea_water - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + surface_downwelling_radiance_per_unit_wavelength_in_sea_water - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky - - northward_ocean_heat_transport_due_to_parameterized_eddy_advection + + precipitation_flux_onto_canopy - - mole_concentration_of_dissolved_inorganic_13C_in_sea_water + + water_evaporation_flux_from_canopy - - surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C + + atmosphere_mass_content_of_convective_cloud_condensed_water - - surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition - - mole_concentration_of_dissolved_inorganic_14C_in_sea_water + + tendency_of_atmosphere_moles_of_nitrous_oxide - - stem_mass_content_of_carbon + + lwe_convective_precipitation_rate - - subsurface_litter_mass_content_of_carbon + + stratiform_precipitation_amount - - mass_flux_of_carbon_into_litter_from_vegetation + + atmosphere_mass_content_of_cloud_ice - - litter_mass_content_of_carbon + + 
[Several large diff hunks follow at this point in the original patch, updating entries in what appears to be the CF standard-name table XML (hunks located around lines 35900-36400 of that file); the surrounding XML markup was lost in extraction, leaving only the affected standard_name strings, so the hunks cannot be faithfully reproduced here.]

From 8687e0b8c2064a287b2791970024d631113ccc2c Mon Sep 17 00:00:00 2001
From: Elias <110238618+ESadek-MO@users.noreply.github.com>
Date: Mon, 28 Oct 2024 14:45:26 +0000
Subject: [PATCH 08/74] What's new updates for v3.11.0rc0 .
(#6201) --- docs/src/whatsnew/{latest.rst => 3.11.rst} | 26 ++++- docs/src/whatsnew/index.rst | 4 +- docs/src/whatsnew/latest.rst.template | 107 --------------------- 3 files changed, 24 insertions(+), 113 deletions(-) rename docs/src/whatsnew/{latest.rst => 3.11.rst} (84%) delete mode 100644 docs/src/whatsnew/latest.rst.template diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/3.11.rst similarity index 84% rename from docs/src/whatsnew/latest.rst rename to docs/src/whatsnew/3.11.rst index 6700774480..6917a303a4 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/3.11.rst @@ -1,13 +1,13 @@ .. include:: ../common_links.inc -|iris_version| |build_date| [unreleased] -**************************************** +v3.11 (28 Oct 2024) [release candidate] +*************************************** This document explains the changes made to Iris for this release (:doc:`View all changes `.) -.. dropdown:: |iris_version| Release Highlights +.. dropdown:: v3.11 Release Highlights :color: primary :icon: info :animate: fade-in @@ -15,7 +15,25 @@ This document explains the changes made to Iris for this release The highlights for this major/minor release of Iris include: - * N/A + * Iris is now compliant with NumPy v2. See the notes below for how this might + affect your scripts. + + * Loading now supports varying reference fields for hybrid vertical + coordinates, e.g. a time-varying orography. This is controlled by the + :meth:`~iris.LOAD_POLICY` object : see :class:`~iris.LoadPolicy`. + + * We now have type hints in :class:`~iris.cube.Cube`, and + :meth:`iris.cube.CubeList.concatenate` is in places almost an order of + magnitude faster! + + * `@bouweandela`_ added type hints for :class:`~iris.cube.Cube`. + + * Checkout the significant performance enhancements section for a couple of + improvements to the performance of Iris. + Special thanks to `@bouweandela`_ for these contributions. + + * We added ``colorbar`` keyword to allow optional creation of + the colorbar in three quickplot methods! And finally, get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index 74cb0cd43d..c3e71b358c 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -6,13 +6,13 @@ What's New in Iris ------------------ -.. include:: latest.rst +.. include:: 3.11.rst .. toctree:: :maxdepth: 1 :hidden: - latest.rst + 3.11.rst 3.10.rst 3.9.rst 3.8.rst diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template deleted file mode 100644 index fedddec5c0..0000000000 --- a/docs/src/whatsnew/latest.rst.template +++ /dev/null @@ -1,107 +0,0 @@ -.. include:: ../common_links.inc - -|iris_version| |build_date| [unreleased] -**************************************** - -This document explains the changes made to Iris for this release -(:doc:`View all changes `.) - - -.. dropdown:: |iris_version| Release Highlights - :color: primary - :icon: info - :animate: fade-in - :open: - - The highlights for this major/minor release of Iris include: - - * N/A - - And finally, get in touch with us on :issue:`GitHub` if you have - any issues or feature requests for improving Iris. Enjoy! - - -NOTE: section BELOW is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst') - -|iris_version| |build_date| -=========================== - -.. 
dropdown:: |iris_version| Patches - :color: primary - :icon: alert - :animate: fade-in - - The patches in this release of Iris include: - - #. N/A - -NOTE: section ABOVE is a template for bugfix patches -==================================================== - (Please remove this section when creating an initial 'latest.rst') - - -📢 Announcements -================ - -#. N/A - - -✨ Features -=========== - -#. N/A - - -🐛 Bugs Fixed -============= - -#. N/A - - -💣 Incompatible Changes -======================= - -#. N/A - - -🚀 Performance Enhancements -=========================== - -#. N/A - - -🔥 Deprecations -=============== - -#. N/A - - -🔗 Dependencies -=============== - -#. N/A - - -📚 Documentation -================ - -#. N/A - - -💼 Internal -=========== - -#. N/A - - -.. comment - Whatsnew author names (@github name) in alphabetical order. Note that, - core dev names are automatically included by the common_links.inc: - - - - -.. comment - Whatsnew resources in alphabetical order: \ No newline at end of file From 60cc56fe7df28c1d4f4455169baa1285fa71016d Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Mon, 28 Oct 2024 16:52:54 +0000 Subject: [PATCH 09/74] correct major minor in whatsnew (#6202) --- docs/src/whatsnew/3.11.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/whatsnew/3.11.rst b/docs/src/whatsnew/3.11.rst index 6917a303a4..b6fcae64c1 100644 --- a/docs/src/whatsnew/3.11.rst +++ b/docs/src/whatsnew/3.11.rst @@ -13,7 +13,7 @@ This document explains the changes made to Iris for this release :animate: fade-in :open: - The highlights for this major/minor release of Iris include: + The highlights for this minor release of Iris include: * Iris is now compliant with NumPy v2. See the notes below for how this might affect your scripts. From 4ca96b8dc8ee21548ab49425ef72f26a9b5026c3 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Wed, 30 Oct 2024 11:26:56 +0000 Subject: [PATCH 10/74] Restore latest Whats New files. --- docs/src/whatsnew/index.rst | 3 +- docs/src/whatsnew/latest.rst | 86 +++++++++++++++++++++ docs/src/whatsnew/latest.rst.template | 107 ++++++++++++++++++++++++++ 3 files changed, 195 insertions(+), 1 deletion(-) create mode 100644 docs/src/whatsnew/latest.rst create mode 100644 docs/src/whatsnew/latest.rst.template diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index c3e71b358c..34e516c23d 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -6,12 +6,13 @@ What's New in Iris ------------------ -.. include:: 3.11.rst +.. include:: latest.rst .. toctree:: :maxdepth: 1 :hidden: + latest.rst 3.11.rst 3.10.rst 3.9.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst new file mode 100644 index 0000000000..9022446cb8 --- /dev/null +++ b/docs/src/whatsnew/latest.rst @@ -0,0 +1,86 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: |iris_version| Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * N/A + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +📢 Announcements +================ + +#. N/A + + +✨ Features +=========== + +#. N/A + + +🐛 Bugs Fixed +============= + +#. 
N/A + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. N/A + + +📚 Documentation +================ + +#. N/A + + +💼 Internal +=========== + +#. N/A + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + + + + +.. comment + Whatsnew resources in alphabetical order: \ No newline at end of file diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template new file mode 100644 index 0000000000..fedddec5c0 --- /dev/null +++ b/docs/src/whatsnew/latest.rst.template @@ -0,0 +1,107 @@ +.. include:: ../common_links.inc + +|iris_version| |build_date| [unreleased] +**************************************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: |iris_version| Release Highlights + :color: primary + :icon: info + :animate: fade-in + :open: + + The highlights for this major/minor release of Iris include: + + * N/A + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +NOTE: section BELOW is a template for bugfix patches +==================================================== + (Please remove this section when creating an initial 'latest.rst') + +|iris_version| |build_date| +=========================== + +.. dropdown:: |iris_version| Patches + :color: primary + :icon: alert + :animate: fade-in + + The patches in this release of Iris include: + + #. N/A + +NOTE: section ABOVE is a template for bugfix patches +==================================================== + (Please remove this section when creating an initial 'latest.rst') + + +📢 Announcements +================ + +#. N/A + + +✨ Features +=========== + +#. N/A + + +🐛 Bugs Fixed +============= + +#. N/A + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. N/A + + +🔥 Deprecations +=============== + +#. N/A + + +🔗 Dependencies +=============== + +#. N/A + + +📚 Documentation +================ + +#. N/A + + +💼 Internal +=========== + +#. N/A + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + + + + +.. 
comment + Whatsnew resources in alphabetical order: \ No newline at end of file From b0bdf28f06e2dc7e384045033fd25ac18659afd2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 10:20:09 +0000 Subject: [PATCH 11/74] [pre-commit.ci] pre-commit autoupdate (#6205) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.0 → v0.7.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.0...v0.7.1) - [github.com/asottile/blacken-docs: 1.19.0 → 1.19.1](https://github.com/asottile/blacken-docs/compare/1.19.0...1.19.1) - [github.com/pre-commit/mirrors-mypy: v1.12.1 → v1.13.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.12.1...v1.13.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 053e4f839a..acc88476e9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.7.0" + rev: "v0.7.1" hooks: - id: ruff types: [file, python] @@ -51,7 +51,7 @@ repos: types: [file, python] - repo: https://github.com/asottile/blacken-docs - rev: 1.19.0 + rev: 1.19.1 hooks: - id: blacken-docs types: [file, rst] @@ -63,7 +63,7 @@ repos: types: [file, python] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.12.1' + rev: 'v1.13.0' hooks: - id: mypy additional_dependencies: From 6ec6360de8bf15254088840e77e4871cd2c43af1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 31 Oct 2024 10:14:35 +0000 Subject: [PATCH 12/74] Bump scitools/workflows from 2024.10.2 to 2024.10.3 (#6208) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.10.2 to 2024.10.3. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.10.2...2024.10.3) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 57dd7e0371..93e534a21c 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.10.2 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.10.3 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index f01a7d9b33..898c5fe1b4 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.10.2 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.10.3 secrets: inherit From 4646752e3597e07e390a5b42a12f9ed5aac5e48f Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Fri, 28 Feb 2025 16:52:32 +0000 Subject: [PATCH 13/74] WIP promoting the bounds to a cube --- .../contributing_pytest_conversions.rst | 56 + .../contributing_running_tests.rst | 2 +- .../developers_guide/contributing_testing.rst | 147 - .../contributing_testing_index.rst | 4 +- .../developers_guide/contributing_tests.rst | 264 ++ docs/src/developers_guide/testing_tools.rst | 80 - docs/src/whatsnew/latest.rst | 16 +- lib/iris/tests/_shared_utils.py | 998 +++++++ lib/iris/tests/conftest.py | 55 + lib/iris/tests/graphics/README.md | 7 +- lib/iris/tests/graphics/__init__.py | 59 +- lib/iris/tests/graphics/idiff.py | 3 +- lib/iris/tests/graphics/recreate_imagerepo.py | 4 +- .../tests/integration/plot/test_animate.py | 20 +- .../tests/integration/plot/test_colorbar.py | 48 +- .../tests/integration/plot/test_netcdftime.py | 19 +- .../tests/integration/plot/test_nzdateline.py | 24 +- .../integration/plot/test_plot_2d_coords.py | 25 +- .../integration/plot/test_vector_plots.py | 26 +- .../mask_cube_2d_create_new_dim.cml | 0 lib/iris/tests/test_aggregate_by.py | 320 ++- lib/iris/tests/test_analysis.py | 657 +++-- lib/iris/tests/test_analysis_calculus.py | 242 +- lib/iris/tests/test_lazy_aggregate_by.py | 79 +- lib/iris/tests/test_mapping.py | 104 +- lib/iris/tests/test_plot.py | 541 ++-- lib/iris/tests/test_pp_cf.py | 34 +- lib/iris/tests/test_pp_module.py | 360 ++- lib/iris/tests/test_pp_stash.py | 154 +- lib/iris/tests/test_pp_to_cube.py | 111 +- lib/iris/tests/test_quickplot.py | 98 +- lib/iris/tests/test_util.py | 138 +- .../test_AtmosphereSigmaFactory.py | 128 +- .../unit/aux_factory/test_AuxCoordFactory.py | 94 +- .../aux_factory/test_HybridPressureFactory.py | 147 +- .../unit/aux_factory/test_OceanSFactory.py | 169 +- .../unit/aux_factory/test_OceanSg1Factory.py | 157 +- .../unit/aux_factory/test_OceanSg2Factory.py | 157 +- .../aux_factory/test_OceanSigmaFactory.py | 101 +- .../aux_factory/test_OceanSigmaZFactory.py | 197 +- .../tests/unit/common/lenient/test_Lenient.py | 221 +- .../unit/common/lenient/test__Lenient.py | 558 ++-- .../common/lenient/test__lenient_client.py | 108 +- .../common/lenient/test__lenient_service.py | 62 +- .../unit/common/lenient/test__qualname.py | 30 +- .../test_AncillaryVariableMetadata.py | 467 ++-- .../unit/common/metadata/test_BaseMetadata.py | 1177 ++++---- 
.../metadata/test_CellMeasureMetadata.py | 574 ++-- .../common/metadata/test_CoordMetadata.py | 602 ++--- .../unit/common/metadata/test_CubeMetadata.py | 547 ++-- .../common/metadata/test__NamedTupleMeta.py | 58 +- .../unit/common/metadata/test_hexdigest.py | 74 +- .../common/metadata/test_metadata_filter.py | 133 +- .../metadata/test_metadata_manager_factory.py | 120 +- .../unit/common/mixin/test_CFVariableMixin.py | 219 +- .../common/mixin/test_LimitedAttributeDict.py | 49 +- .../mixin/test__get_valid_standard_name.py | 35 +- .../tests/unit/common/resolve/test_Resolve.py | 2390 +++++++++-------- .../unit/concatenate/test__CubeSignature.py | 89 +- .../unit/concatenate/test_concatenate.py | 418 ++- lib/iris/tests/unit/config/test_NetCDF.py | 53 +- lib/iris/tests/unit/cube/test_Cube.py | 1435 +++++----- .../tests/unit/cube/test_CubeAttrsDict.py | 6 +- lib/iris/tests/unit/cube/test_CubeList.py | 245 +- .../unit/cube/test_Cube__aggregated_by.py | 313 +-- .../tests/unit/cube/test_Cube__operators.py | 39 +- lib/iris/tests/unit/fileformats/__init__.py | 60 - .../unit/fileformats/abf/test_ABFField.py | 49 +- .../tests/unit/fileformats/cf/test_CFGroup.py | 28 +- .../unit/fileformats/cf/test_CFReader.py | 387 ++- .../unit/fileformats/dot/test__dot_path.py | 62 +- .../name_loaders/test__build_cell_methods.py | 70 +- ...test__build_lat_lon_for_NAME_timeseries.py | 43 +- .../test__calc_integration_period.py | 24 +- .../name_loaders/test__cf_height_from_name.py | 95 +- .../name_loaders/test__generate_cubes.py | 95 +- .../nimrod_load_rules/test_units.py | 124 +- .../nimrod_load_rules/test_vertical_coord.py | 34 +- .../fileformats/pp_load_rules/__init__.py | 23 + .../pp_load_rules/test__all_other_rules.py | 108 +- ...__collapse_degenerate_points_and_bounds.py | 43 +- ...est__convert_scalar_pseudo_level_coords.py | 20 +- ...test__convert_scalar_realization_coords.py | 20 +- .../test__convert_time_coords.py | 71 +- .../test__convert_vertical_coords.py | 64 +- .../pp_load_rules/test__dim_or_aux.py | 17 +- .../pp_load_rules/test__epoch_date_hours.py | 55 +- .../pp_load_rules/test__model_level_number.py | 16 +- .../test__reduced_points_and_bounds.py | 57 +- .../test__reshape_vector_args.py | 28 +- .../fileformats/pp_load_rules/test_convert.py | 107 +- .../unit/fileformats/rules/test_Loader.py | 47 +- .../unit/fileformats/rules/test__make_cube.py | 24 +- .../fileformats/{ => rules}/test_rules.py | 80 +- .../test_ArrayStructure.py | 96 +- .../test_GroupStructure.py | 20 +- .../unit/lazy_data/test_as_concrete_data.py | 45 +- .../tests/unit/lazy_data/test_as_lazy_data.py | 70 +- .../unit/lazy_data/test_co_realise_cubes.py | 25 +- .../tests/unit/lazy_data/test_is_lazy_data.py | 14 +- .../unit/lazy_data/test_lazy_elementwise.py | 29 +- .../lazy_data/test_map_complete_blocks.py | 61 +- .../lazy_data/test_multidim_lazy_stack.py | 17 +- .../tests/unit/lazy_data/test_non_lazy.py | 21 +- lib/iris/tests/unit/plot/__init__.py | 31 +- lib/iris/tests/unit/plot/_blockplot_common.py | 63 +- .../test__check_bounds_contiguity_and_mask.py | 36 +- ..._check_geostationary_coords_and_convert.py | 19 +- lib/iris/tests/unit/plot/test__fixup_dates.py | 31 +- .../tests/unit/plot/test__get_plot_defn.py | 21 +- ...est__get_plot_defn_custom_coords_picked.py | 41 +- .../tests/unit/plot/test__get_plot_objects.py | 25 +- .../test__replace_axes_with_cartopy_axes.py | 26 +- lib/iris/tests/unit/plot/test_contour.py | 33 +- lib/iris/tests/unit/plot/test_contourf.py | 60 +- lib/iris/tests/unit/plot/test_hist.py | 33 +- 
lib/iris/tests/unit/plot/test_outline.py | 33 +- lib/iris/tests/unit/plot/test_pcolor.py | 24 +- lib/iris/tests/unit/plot/test_pcolormesh.py | 22 +- lib/iris/tests/unit/plot/test_plot.py | 47 +- lib/iris/tests/unit/plot/test_points.py | 33 +- lib/iris/tests/unit/plot/test_scatter.py | 28 +- lib/iris/tests/unit/quickplot/test_contour.py | 27 +- .../tests/unit/quickplot/test_contourf.py | 33 +- lib/iris/tests/unit/quickplot/test_outline.py | 27 +- lib/iris/tests/unit/quickplot/test_pcolor.py | 27 +- .../tests/unit/quickplot/test_pcolormesh.py | 27 +- lib/iris/tests/unit/quickplot/test_plot.py | 34 +- lib/iris/tests/unit/quickplot/test_points.py | 27 +- lib/iris/tests/unit/quickplot/test_scatter.py | 24 +- .../tests/unit/util/test__coord_regular.py | 49 +- lib/iris/tests/unit/util/test__is_circular.py | 14 +- lib/iris/tests/unit/util/test__mask_array.py | 2 +- .../unit/util/test__slice_data_with_keys.py | 39 +- lib/iris/tests/unit/util/test_array_equal.py | 80 +- .../unit/util/test_broadcast_to_shape.py | 31 +- .../unit/util/test_column_slices_generator.py | 27 +- .../test_demote_dim_coord_to_aux_coord.py | 38 +- .../tests/unit/util/test_describe_diff.py | 27 +- .../unit/util/test_equalise_attributes.py | 33 +- .../unit/util/test_file_is_newer_than.py | 48 +- .../unit/util/test_find_discontiguities.py | 31 +- .../tests/unit/util/test_guess_coord_axis.py | 6 +- lib/iris/tests/unit/util/test_mask_cube.py | 87 +- .../util/test_mask_cube_from_shapefile.py | 6 +- lib/iris/tests/unit/util/test_new_axis.py | 8 +- .../test_promote_aux_coord_to_dim_coord.py | 57 +- lib/iris/tests/unit/util/test_reverse.py | 113 +- .../tests/unit/util/test_rolling_window.py | 32 +- lib/iris/tests/unit/util/test_squeeze.py | 25 +- .../tests/unit/util/test_unify_time_units.py | 21 +- pyproject.toml | 1 + requirements/locks/py310-linux-64.lock | 30 +- requirements/locks/py311-linux-64.lock | 30 +- requirements/locks/py312-linux-64.lock | 30 +- requirements/py310.yml | 1 + requirements/py311.yml | 1 + requirements/py312.yml | 1 + 158 files changed, 10238 insertions(+), 9578 deletions(-) create mode 100644 docs/src/developers_guide/contributing_pytest_conversions.rst delete mode 100644 docs/src/developers_guide/contributing_testing.rst create mode 100644 docs/src/developers_guide/contributing_tests.rst delete mode 100755 docs/src/developers_guide/testing_tools.rst create mode 100644 lib/iris/tests/_shared_utils.py create mode 100644 lib/iris/tests/conftest.py rename lib/iris/tests/results/unit/util/mask_cube/{TestCubeMask => CubeMask}/mask_cube_2d_create_new_dim.cml (100%) rename lib/iris/tests/unit/fileformats/{ => rules}/test_rules.py (81%) diff --git a/docs/src/developers_guide/contributing_pytest_conversions.rst b/docs/src/developers_guide/contributing_pytest_conversions.rst new file mode 100644 index 0000000000..c6bb35c2cd --- /dev/null +++ b/docs/src/developers_guide/contributing_pytest_conversions.rst @@ -0,0 +1,56 @@ +.. include:: ../common_links.inc + +.. _contributing_pytest_conversions: + +******************************************* +Converting From ``unittest`` to ``pytest`` +******************************************* + +Conversion Checklist +-------------------- +.. note:: + Please bear in mind the following checklist is for general use; there may be + some cases which require extra context or thought before implementing these changes. + +#. Before making any manual changes, run https://github.com/dannysepler/pytestify + on the file. This does a lot of the brunt work for you! +#. 
Check for references to :class:`iris.tests.IrisTest`. If a class inherits
+   from this, remove the inheritance. Inheritance is unnecessary for
+   pytest tests, so :class:`iris.tests.IrisTest` has been deprecated
+   and its convenience methods have been moved to the
+   :mod:`iris.tests._shared_utils` module.
+#. Check for references to ``unittest``. Many of the functions within unittest
+   are also in pytest, so often you can just change where the function is
+   imported from.
+#. Check for references to ``self.assert``. Pytest has a lighter-weight syntax
+   for assertions, e.g. ``assert x == 2`` instead of ``assertEqual(x, 2)``. In
+   the case of custom :class:`~iris.tests.IrisTest` assertions, the majority of
+   these have been replicated in :mod:`iris.tests._shared_utils`, but with
+   snake_case instead of camelCase. Some :class:`iris.tests.IrisTest`
+   assertions have not been converted into :mod:`iris.tests._shared_utils`, as
+   these were deemed easy to achieve via simple ``assert ...`` statements.
+#. Check for references to ``setUp()``. Replace this with ``_setup()`` instead.
+   Ensure that this is decorated with ``@pytest.fixture(autouse=True)``.
+
+   .. code-block:: python
+
+       @pytest.fixture(autouse=True)
+       def _setup(self):
+           ...
+
+#. Check for references to ``@tests``. These should be changed to ``@_shared_utils``.
+#. Check for references to ``with mock.patch("...")``. These should be replaced
+   with ``mocker.patch("...")``. Note, ``mocker.patch("...")`` is NOT a context
+   manager.
+#. Check for ``np.testing.assert...``. This can usually be swapped for
+   ``_shared_utils.assert...``.
+#. Check for references to ``super()``. Most test classes used to inherit from
+   :class:`iris.tests.IrisTest`, so references to this should be removed.
+#. Check for references to ``self.tmp_dir``. In pytest, ``tmp_path`` is used
+   instead, and can be passed into functions as a fixture.
+#. Check for ``if __name__ == '__main__'``. This is no longer needed with pytest.
+#. Check for ``mock.patch("warnings.warn")``. This can be replaced with
+   ``pytest.warns(match=message)``.
+#. Check the file against https://github.com/astral-sh/ruff, using
+   ``pip install ruff`` -> ``ruff check --select PT ``.
+
diff --git a/docs/src/developers_guide/contributing_running_tests.rst b/docs/src/developers_guide/contributing_running_tests.rst
index f60cedba05..a72caa5881 100644
--- a/docs/src/developers_guide/contributing_running_tests.rst
+++ b/docs/src/developers_guide/contributing_running_tests.rst
@@ -87,7 +87,7 @@ experimental dependency not being present.
     SKIPPED [1] lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py:29: Test(s) require external data.
 
 All Python decorators that skip tests will be defined in
-``lib/iris/tests/__init__.py`` with a function name with a prefix of
+``lib/iris/tests/_shared_utils.py`` with a function name with a prefix of
 ``skip_``. You can also run a specific test module. The example below runs the tests for
_developer_test_categories: - - -Test Categories -*************** - -There are two main categories of tests within Iris: - -- :ref:`testing.unit_test` -- :ref:`testing.integration` - -Ideally, all code changes should be accompanied by one or more unit -tests, and by zero or more integration tests. - -But if in any doubt about what tests to add or how to write them please -feel free to submit a pull-request in any state and ask for assistance. - - -.. _testing.unit_test: - -Unit Tests -========== - -Code changes should be accompanied by enough unit tests to give a -high degree of confidence that the change works as expected. In -addition, the unit tests can help describe the intent behind a change. - -The docstring for each test module must state the unit under test. -For example: - - :literal:`"""Unit tests for the \`iris.experimental.raster.export_geotiff\` function."""` - -All unit tests must be placed and named according to the following -structure: - - -.. _testing.classes: - -Classes -------- - -When testing a class all the tests must reside in the module: - - :literal:`lib/iris/tests/unit//test_.py` - -Within this test module each tested method must have one or more -corresponding test classes, for example: - -* ``Test_`` -* ``Test___`` - -And within those test classes, the test methods must be named according -to the aspect of the tested method which they address. - -**Examples**: - -All unit tests for :py:class:`iris.cube.Cube` must reside in: - - :literal:`lib/iris/tests/unit/cube/test_Cube.py` - -Within that file the tests might look something like: - -.. code-block:: python - - # Tests for the Cube.xml() method. - class Test_xml(tests.IrisTest): - def test_some_general_stuff(self): - ... - - - # Tests for the Cube.xml() method, focussing on the behaviour of - # the checksums. - class Test_xml__checksum(tests.IrisTest): - def test_checksum_ignores_masked_values(self): - ... - - - # Tests for the Cube.add_dim_coord() method. - class Test_add_dim_coord(tests.IrisTest): - def test_normal_usage(self): - ... - - def test_coord_already_present(self): - ... - - -.. _testing.functions: - -Functions ---------- - -When testing a function all the tests must reside in the module: - - :literal:`lib/iris/tests/unit//test_.py` - -Within this test module there must be one or more test classes, for example: - -* ``Test`` -* ``TestAspectOfFunction`` - -And within those test classes, the test methods must be named according -to the aspect of the tested function which they address. - -**Examples**: - -All unit tests for :py:func:`iris.experimental.raster.export_geotiff` -must reside in: - - :literal:`lib/iris/tests/unit/experimental/raster/test_export_geotiff.py` - -Within that file the tests might look something like: - -.. code-block:: python - - # Tests focussing on the handling of different data types. - class TestDtypeAndValues(tests.IrisTest): - def test_int16(self): - ... - - def test_int16_big_endian(self): - ... - - - # Tests focussing on the handling of different projections. - class TestProjection(tests.IrisTest): - def test_no_ellipsoid(self): - ... - - -.. _testing.integration: - -Integration Tests -================= - -Some code changes may require tests which exercise several units in -order to demonstrate an important consequence of their interaction which -may not be apparent when considering the units in isolation. - -These tests must be placed in the ``lib/iris/tests/integration`` folder. -Unlike unit tests, there is no fixed naming scheme for integration -tests. 
But folders and files must be created as required to help -developers locate relevant tests. It is recommended they are named -according to the capabilities under test, e.g. -``metadata/test_pp_preservation.py``, and not named according to the -module(s) under test. diff --git a/docs/src/developers_guide/contributing_testing_index.rst b/docs/src/developers_guide/contributing_testing_index.rst index 2f5ae411e8..2d57da3d93 100644 --- a/docs/src/developers_guide/contributing_testing_index.rst +++ b/docs/src/developers_guide/contributing_testing_index.rst @@ -6,9 +6,9 @@ Testing .. toctree:: :maxdepth: 3 - contributing_testing - testing_tools + contributing_tests contributing_graphics_tests contributing_running_tests contributing_ci_tests contributing_benchmarks + contributing_pytest_conversions diff --git a/docs/src/developers_guide/contributing_tests.rst b/docs/src/developers_guide/contributing_tests.rst new file mode 100644 index 0000000000..e18a6987d2 --- /dev/null +++ b/docs/src/developers_guide/contributing_tests.rst @@ -0,0 +1,264 @@ +.. include:: ../common_links.inc + +.. _contributing_tests: + +************* +Writing Tests +************* + +.. note:: + If you're converting UnitTest tests to PyTest, check out + :ref:`contributing_pytest_conversions`. + +.. _developer_pytest_categories: + +Test Categories +=============== + +There are two main categories of tests within Iris: + +- `unit tests` +- `integration tests` + +Ideally, all code changes should be accompanied by one or more unit +tests, and by zero or more integration tests. + +Code changes should be accompanied by enough unit tests to give a +high degree of confidence that the change works as expected. In +addition, the unit tests can help describe the intent behind a change. + +The docstring for each test module must state the unit under test. +For example: + + :literal:`"""Unit tests for the \`iris.experimental.raster.export_geotiff\` function."""` + +When testing a class, all the tests must reside in the module: + + :literal:`lib/iris/tests/unit//test_.py` + +When testing a function, all the tests must reside in the module: + + :literal:`lib/iris/tests/unit//test_.py` + +Some code changes may require tests which exercise several units in +order to demonstrate an important consequence of their interaction which +may not be apparent when considering the units in isolation. These tests must +be placed in the ``lib/iris/tests/integration`` folder. + +With integration tests, folders and files must be created as required to help +developers locate relevant tests. It is recommended they are named +according to the capabilities under test, e.g. +``metadata/test_pp_preservation.py``, and not named according to the +module(s) under test. + +If in any doubt about what tests to add or how to write them please +feel free to submit a pull-request in any state and ask for assistance. + +.. _testing_style_guide: + +PyTest Style Guide +================== + +.. note:: + If you're converting UnitTest tests to PyTest, check out + :ref:`contributing_pytest_conversions`. + +This style guide should be approached pragmatically. Many of the guidelines laid out +below will not be practical in every scenario, and as such should not be considered +firm rules. + +At time of writing, some existing tests have already been written in PyTest, +so might not be abiding by these guidelines. 
+ +`conftest.py `_ +----------------------------------------------------------------------------------------------------------------------------- + +There should be a ``conftest.py`` file in the ``root/unit`` and ``root/integration`` +folders. Additional lower level ``conftest``\s can be added if it is agreed there +is a need. + +`Fixtures `_ +------------------------------------------------------------------------------------ + +As far as is possible, the actual test function should do little else but the +actual assertion. Separating off preparation into fixtures may make the code +harder to follow, so compromises are acceptable. For example, setting up a test +``Cube`` should be a fixture, whereas creating a simple string +(``expected = "foo"``), or a single use setup, should *not* be a fixture. + + +New fixtures should always be considered for ``conftest`` when added. If it is +decided that they are not suitably reusable, they can be placed within the +local test file. + +`Parameterisation `_ +-------------------------------------------------------------------------------- + +Though it is a useful tool, we should not be complicating tests to work around +parameters; they should only be used when it is simple and apparent to implement. + +Where you are parameterising multiple tests with the same parameters, it is +usually prudent to use the `parameterisation within fixtures +`_. +When doing this, ensure within the tests that it is apparent that they are being +parameterised, either within the fixture name or with comments. + +All parameterisation benefits from +`ids `_, +and so should be used where possible. + +`Mocks `_ +-------------------------------------------------------------------- + +Any mocking should be done with ``pytest.mock``, and monkeypatching where suitable. + +.. note:: + If you think we're missing anything important here, please consider creating an + issue or discussion and share your ideas with the team! + +`Classes `_ +--------------------------------------------------------------------------------------------------- + +How and when to group tests within classes can be based on personal opinion, +we do not deem consistency on this a vital concern. + +Naming Test Classes and Functions +--------------------------------- + +When testing classes and their methods, each tested method within a test module +may have corresponding test classes, for example: + +* ``Test_`` +* ``Test___`` + +Within these test classes, the test methods must be named according +to the aspect of the tested method which they address. + +**Examples**: + +All unit tests for :py:class:`iris.cube.Cube` reside in: + + :literal:`lib/iris/tests/unit/cube/test_Cube.py` + +Within that file the tests might look something like: + +.. code-block:: python + + # A single test for the Cube.xml() method. + def test_xml_some_general_stuff(self): + ... + + + # A single test for the Cube.xml() method, focussing on the behaviour of + # the checksums. + def test_xml_checksum_ignores_masked_values(self): + ... + + + # Tests for the Cube.add_dim_coord() method. + class Test_add_dim_coord: + def test_normal_usage(self): + ... + + def test_coord_already_present(self): + ... + +When testing functions, within the test module there may be test classes, for +example: + +* ``Test`` +* ``TestAspectOfFunction`` + +Within those test classes, the test methods must be named according +to the aspect of the tested function which they address. 
+ +**Examples**: + +All unit tests for :py:func:`iris.experimental.raster.export_geotiff` +must reside in: + + :literal:`lib/iris/tests/unit/experimental/raster/test_export_geotiff.py` + +Within that file the tests might look something like: + +.. code-block:: python + + # Tests focussing on the handling of different data types. + class TestDtypeAndValues: + def test_int16(self): + ... + + def test_int16_big_endian(self): + ... + + + # Tests focussing on the handling of different projections. + def test_no_ellipsoid(self): + ... + +There is no fixed naming scheme for integration tests. + +.. _testing_tools: + +Testing tools +============= + +.. note:: + :class:`iris.tests.IrisTest` has been deprecated, and replaced with + the :mod:`iris.tests._shared_utils` module. + +Iris has various internal convenience functions and utilities available to +support writing tests. Using these makes tests quicker and easier to write, and +also consistent with the rest of Iris (which makes it easier to work with the +code). Most of these conveniences are accessed through the +:mod:`iris.tests._shared_utils` module. + +.. tip:: + + All functions listed on this page are defined within + :mod:`iris.tests._shared_utils`. They can be accessed within a test using + ``_shared_utils.example_function``. + +Custom assertions +----------------- + +:mod:`iris.tests._shared_utils` supports a variety of custom pytest-style +assertions, such as :func:`~iris.tests._shared_utils.assert_array_equal`, and +:func:`~iris.tests._shared_utils.assert_array_almost_equal`. + +.. _create-missing: + +Saving results +-------------- + +Some tests compare the generated output to the expected result contained in a +file. Custom assertions for this include +:func:`~iris.tests._shared_utils.assert_CML_approx_data` +:func:`~iris.tests._shared_utils.assert_CDL` +:func:`~iris.tests._shared_utils.assert_CML` and +:func:`~iris.tests._shared_utils.assert_text_file`. See docstrings for more +information. + +.. note:: + + Sometimes code changes alter the results expected from a test containing the + above methods. These can be updated by removing the existing result files + and then running the file containing the test with a ``--create-missing`` + command line argument, or setting the ``IRIS_TEST_CREATE_MISSING`` + environment variable to anything non-zero. This will create the files rather + than erroring, allowing you to commit the updated results. + +Capturing exceptions and logging +-------------------------------- + +:mod:`~iris.tests._shared_utils` includes several context managers that can be used +to make test code tidier and easier to read. These include +:meth:`~iris.tests._shared_utils.assert_no_warnings_regexp` and +:meth:`~iris.tests._shared_utils.assert_logs`. + +Graphic tests +------------- + +As a package capable of generating graphical outputs, Iris has utilities for +creating and updating graphical tests - see :ref:`testing.graphics` for more +information. \ No newline at end of file diff --git a/docs/src/developers_guide/testing_tools.rst b/docs/src/developers_guide/testing_tools.rst deleted file mode 100755 index dd628d37fc..0000000000 --- a/docs/src/developers_guide/testing_tools.rst +++ /dev/null @@ -1,80 +0,0 @@ -.. include:: ../common_links.inc - -.. _testing_tools: - -Testing tools -************* - -Iris has various internal convenience functions and utilities available to -support writing tests. 
Using these makes tests quicker and easier to write, and -also consistent with the rest of Iris (which makes it easier to work with the -code). Most of these conveniences are accessed through the -:class:`iris.tests.IrisTest` class, from -which Iris' test classes then inherit. - -.. tip:: - - All functions listed on this page are defined within - :mod:`iris.tests.__init__.py` as methods of - :class:`iris.tests.IrisTest_nometa` (which :class:`iris.tests.IrisTest` - inherits from). They can be accessed within a test using - ``self.exampleFunction``. - -Custom assertions -================= - -:class:`iris.tests.IrisTest` supports a variety of custom unittest-style -assertions, such as :meth:`~iris.tests.IrisTest_nometa.assertArrayEqual`, -:meth:`~iris.tests.IrisTest_nometa.assertArrayAlmostEqual`. - -.. _create-missing: - -Saving results --------------- - -Some tests compare the generated output to the expected result contained in a -file. Custom assertions for this include -:meth:`~iris.tests.IrisTest_nometa.assertCMLApproxData` -:meth:`~iris.tests.IrisTest_nometa.assertCDL` -:meth:`~iris.tests.IrisTest_nometa.assertCML` and -:meth:`~iris.tests.IrisTest_nometa.assertTextFile`. See docstrings for more -information. - -.. note:: - - Sometimes code changes alter the results expected from a test containing the - above methods. These can be updated by removing the existing result files - and then running the file containing the test with a ``--create-missing`` - command line argument, or setting the ``IRIS_TEST_CREATE_MISSING`` - environment variable to anything non-zero. This will create the files rather - than erroring, allowing you to commit the updated results. - -Context managers -================ - -Capturing exceptions and logging --------------------------------- - -:class:`iris.tests.IrisTest` includes several context managers that can be used -to make test code tidier and easier to read. These include -:meth:`~iris.tests.IrisTest_nometa.assertWarnsRegexp` and -:meth:`~iris.tests.IrisTest_nometa.assertLogs`. - -Temporary files ---------------- - -It's also possible to generate temporary files in a concise fashion with -:meth:`~iris.tests.IrisTest_nometa.temp_filename`. - -Patching -======== - -:meth:`~iris.tests.IrisTest_nometa.patch` is a wrapper around ``unittest.patch`` -that will be automatically cleaned up at the end of the test. - -Graphic tests -============= - -As a package capable of generating graphical outputs, Iris has utilities for -creating and updating graphical tests - see :ref:`testing.graphics` for more -information. \ No newline at end of file diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 9022446cb8..7798e46481 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -42,7 +42,9 @@ This document explains the changes made to Iris for this release 💣 Incompatible Changes ======================= -#. N/A +#. :class:`iris.tests.IrisTest` is being replaced by :mod:`iris.tests._shared_utils`. + Once conversion from unittest to pytest is completed, :class:`iris.tests.IrisTest` + class will be deprecated. 🚀 Performance Enhancements @@ -66,13 +68,21 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ -#. N/A +#. `@ESadek-MO`_ and `@trexfeathers`_ created :ref:`contributing_pytest_conversions` + as a guide for converting from ``unittest`` to ``pytest``. (:pull:`5785`) + +#. 
`@ESadek-MO`_ and `@trexfeathers`_ created a style guide for ``pytest`` tests, + and consolidated ``Test Categories`` and ``Testing Tools`` into + :ref:`contributing_tests` (:issue:`5574`, :pull:`5785`) 💼 Internal =========== -#. N/A +#. `@ESadek-MO`_ `@pp-mo`_ `@bjlittle`_ `@trexfeathers`_ and `@HGWright`_ have + converted around a third of Iris' ``unittest`` style tests to ``pytest``. This is + part of an ongoing effort to move from ``unittest`` to ``pytest``. (:pull:`6207`, + part of :issue:`6212`) .. comment diff --git a/lib/iris/tests/_shared_utils.py b/lib/iris/tests/_shared_utils.py new file mode 100644 index 0000000000..4a0d275cdd --- /dev/null +++ b/lib/iris/tests/_shared_utils.py @@ -0,0 +1,998 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Provides testing capabilities and customisations specific to Iris.""" + +import collections +from collections.abc import Mapping +import contextlib +import difflib +import filecmp +import functools +import gzip +import json +import math +import os +import os.path +from pathlib import Path +import re +import shutil +import subprocess +from typing import Optional +import warnings +import xml.dom.minidom +import zlib + +import numpy as np +import numpy.ma as ma +import pytest +import requests + +import iris.config +import iris.cube +import iris.fileformats +import iris.tests +import iris.tests.graphics as graphics +import iris.util + +MPL_AVAILABLE = graphics.MPL_AVAILABLE + + +try: + from osgeo import gdal # noqa +except ImportError: + GDAL_AVAILABLE = False +else: + GDAL_AVAILABLE = True + +try: + import iris_sample_data # noqa +except ImportError: + SAMPLE_DATA_AVAILABLE = False +else: + SAMPLE_DATA_AVAILABLE = True + +try: + import nc_time_axis # noqa + + NC_TIME_AXIS_AVAILABLE = True +except ImportError: + NC_TIME_AXIS_AVAILABLE = False + +try: + # Added a timeout to stop the call to requests.get hanging when running + # on a platform which has restricted/no internet access. + requests.get("https://github.com/SciTools/iris", timeout=10.0) + INET_AVAILABLE = True +except requests.exceptions.ConnectionError: + INET_AVAILABLE = False + +try: + import stratify # noqa + + STRATIFY_AVAILABLE = True +except ImportError: + STRATIFY_AVAILABLE = False + +#: Basepath for test results. +_RESULT_PATH = os.path.join(os.path.dirname(__file__), "results") + + +def _assert_masked_array(assertion, a, b, strict, **kwargs): + # Compare masks. + a_mask, b_mask = ma.getmaskarray(a), ma.getmaskarray(b) + np.testing.assert_array_equal(a_mask, b_mask) # pytest already? + + if strict: + # Compare all data values. + assertion(a.data, b.data, **kwargs) + else: + # Compare only unmasked data values. + assertion( + ma.compressed(a), + ma.compressed(b), + **kwargs, + ) + + +def assert_masked_array_equal(a, b, strict=False): + """Check that masked arrays are equal. This requires the + unmasked values and masks to be identical. + + Parameters + ---------- + a, b : array-like + Two arrays to compare. + strict : bool, optional + If True, perform a complete mask and data array equality check. + If False (default), the data array equality considers only unmasked + elements. + + """ + _assert_masked_array(np.testing.assert_array_equal, a, b, strict) + + +def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): + """Check that masked arrays are almost equal. 
This requires the + masks to be identical, and the unmasked values to be almost + equal. + + Parameters + ---------- + a, b : array-like + Two arrays to compare. + strict : bool, optional + If True, perform a complete mask and data array equality check. + If False (default), the data array equality considers only unmasked + elements. + decimal : int, optional, default=6 + Equality tolerance level for + :meth:`numpy.testing.assert_array_almost_equal`, with the meaning + 'abs(desired-actual) < 0.5 * 10**(-decimal)' + + """ + _assert_masked_array( + np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal + ) + + +def _assert_str_same( + reference_str, + test_str, + reference_filename, + type_comparison_name="Strings", +): + diff = "".join( + difflib.unified_diff( + reference_str.splitlines(1), + test_str.splitlines(1), + "Reference", + "Test result", + "", + "", + 0, + ) + ) + fail_string = ( + f"{type_comparison_name} do not match: {reference_filename}\n" f"{diff}" + ) + assert reference_str == test_str, fail_string + + +def get_data_path(relative_path): + """Return the absolute path to a data file when given the relative path + as a string, or sequence of strings. + + """ + if not isinstance(relative_path, str): + relative_path = os.path.join(*relative_path) + test_data_dir = iris.config.TEST_DATA_DIR + if test_data_dir is None: + test_data_dir = "" + data_path = os.path.join(test_data_dir, relative_path) + + if iris.tests._EXPORT_DATAPATHS_FILE is not None: + iris.tests._EXPORT_DATAPATHS_FILE.write(data_path + "\n") + + if isinstance(data_path, str) and not os.path.exists(data_path): + # if the file is gzipped, ungzip it and return the path of the ungzipped + # file. + gzipped_fname = data_path + ".gz" + if os.path.exists(gzipped_fname): + with gzip.open(gzipped_fname, "rb") as gz_fh: + try: + with open(data_path, "wb") as fh: + fh.writelines(gz_fh) + except IOError: + # Put ungzipped data file in a temporary path, since we + # can't write to the original path (maybe it is owned by + # the system.) + _, ext = os.path.splitext(data_path) + data_path = iris.util.create_temp_filename(suffix=ext) + with open(data_path, "wb") as fh: + fh.writelines(gz_fh) + + return data_path + + +def get_result_path(relative_path): + """Returns the absolute path to a result file when given the relative path + as a string, or sequence of strings. + + """ + if not isinstance(relative_path, str): + relative_path = os.path.join(*relative_path) + return os.path.abspath(os.path.join(_RESULT_PATH, relative_path)) + + +def _check_for_request_fixture(request, func_name: str): + """Raise an error if the first argument is not a pytest.FixtureRequest. + + Written to provide the clearest possible message for devs refactoring from + the deprecated IrisTest style tests. + """ + if not hasattr(request, "fixturenames"): + message = ( + f"{func_name}() expected: pytest.FixtureRequest instance, got: " + f"{request}" + ) + raise ValueError(message) + + +def result_path(request: pytest.FixtureRequest, basename=None, ext=""): + """Generate the path to a test result; from the calling file, class, method. + + Parameters + ---------- + request : pytest.FixtureRequest + A pytest ``request`` fixture passed down from the calling test. Is + interpreted for the automatic generation of a result path. See Examples + for how to access the ``request`` fixture. + basename : optional, default=None + File basename. If omitted, this is generated from the calling method. + ext : str, optional, default="" + Appended file extension. 
+ + Examples + -------- + The PyTest ``request`` fixture is always available as a test argument: + + >>> def test_one(request): + ... path_one = (result_path(request)) + + """ + _check_for_request_fixture(request, "result_path") + + if __package__ != "iris.tests": + # Relying on this being the location so that we can derive the full + # path of the tests root. + # Would normally use assert, but this means something to PyTest. + message = "result_path() must be in the iris.tests root to function." + raise RuntimeError(message) + tests_root = Path(__file__).parent + + if ext and not ext.startswith("."): + ext = f".{ext}" + + def remove_test(string: str): + result = string + result = re.sub(r"(?i)test_", "", result) + result = re.sub(r"(?i)test", "", result) + return result + + # Generate the directory name from the calling file name. + output_path = get_result_path("") / request.path.relative_to(tests_root) + output_path = output_path.with_suffix("") + output_path = output_path.with_name(remove_test(output_path.name)) + + # Optionally add a class subdirectory if called from a class. + if request.cls is not None: + output_class = remove_test(request.cls.__name__) + output_path = output_path / output_class + + # Generate the file name from the calling function name. + node_name = request.node.originalname + if basename is not None: + output_func = basename + elif node_name == "": + output_func = "" + else: + output_func = remove_test(node_name) + output_path = output_path / output_func + + # Optionally use parameter values as the file name if parameterised. + # (The function becomes a subdirectory in this case). + if hasattr(request.node, "callspec"): + output_path = output_path / request.node.callspec.id + + output_path = output_path.with_suffix(ext) + + return str(output_path) + + +def assert_CML_approx_data( + request: pytest.FixtureRequest, cubes, reference_filename=None, **kwargs +): + # passes args and kwargs on to approx equal + # See result_path() Examples for how to access the ``request`` fixture. + + _check_for_request_fixture(request, "assert_CML_approx_data") + + if isinstance(cubes, iris.cube.Cube): + cubes = [cubes] + if reference_filename is None: + reference_filename = result_path(request, None, "cml") + reference_filename = [get_result_path(reference_filename)] + for i, cube in enumerate(cubes): + fname = list(reference_filename) + # don't want the ".cml" for the json stats file + if fname[-1].endswith(".cml"): + fname[-1] = fname[-1][:-4] + fname[-1] += ".data.%d.json" % i + assert_data_almost_equal(cube.data, fname, **kwargs) + assert_CML(request, cubes, reference_filename, checksum=False) + + +def assert_CDL( + request: pytest.FixtureRequest, netcdf_filename, reference_filename=None, flags="-h" +): + """Test that the CDL for the given netCDF file matches the contents + of the reference file. + + If the environment variable IRIS_TEST_CREATE_MISSING is + non-empty, the reference file is created if it doesn't exist. + + Parameters + ---------- + request : pytest.FixtureRequest + A pytest ``request`` fixture passed down from the calling test. Is + required by :func:`result_path`. See :func:`result_path` Examples + for how to access the ``request`` fixture. + netcdf_filename : + The path to the netCDF file. + reference_filename : optional, default=None + The relative path (relative to the test results directory). + If omitted, the result is generated from the calling + method's name, class, and module using + :meth:`iris.tests.IrisTest.result_path`. 
+ flags : str, optional + Command-line flags for `ncdump`, as either a whitespace + separated string or an iterable. Defaults to '-h'. + + """ + _check_for_request_fixture(request, "assert_CDL") + + if reference_filename is None: + reference_path = result_path(request, None, "cdl") + else: + reference_path = get_result_path(reference_filename) + + # Convert the netCDF file to CDL file format. + if flags is None: + flags = [] + elif isinstance(flags, str): + flags = flags.split() + else: + flags = list(map(str, flags)) + + try: + exe_path = env_bin_path("ncdump") + args = [exe_path] + flags + [netcdf_filename] + cdl = subprocess.check_output(args, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as exc: + print(exc.output) + raise + + # Ingest the CDL for comparison, excluding first line. + lines = cdl.decode("ascii").splitlines() + lines = lines[1:] + + # Ignore any lines of the general form "... :_NCProperties = ..." + # (an extra global attribute, displayed by older versions of ncdump). + re_ncprop = re.compile(r"^\s*:_NCProperties *=") + lines = [line for line in lines if not re_ncprop.match(line)] + + # Sort the dimensions (except for the first, which can be unlimited). + # This gives consistent CDL across different platforms. + def sort_key(line): + return ("UNLIMITED" not in line, line) + + dimension_lines = slice(lines.index("dimensions:") + 1, lines.index("variables:")) + lines[dimension_lines] = sorted(lines[dimension_lines], key=sort_key) + cdl = "\n".join(lines) + "\n" # type: ignore[assignment] + + _check_same(cdl, reference_path, type_comparison_name="CDL") + + +def assert_CML( + request: pytest.FixtureRequest, cubes, reference_filename=None, checksum=True +): + """Test that the CML for the given cubes matches the contents of + the reference file. + + If the environment variable IRIS_TEST_CREATE_MISSING is + non-empty, the reference file is created if it doesn't exist. + + Parameters + ---------- + request : pytest.FixtureRequest + A pytest ``request`` fixture passed down from the calling test. Is + required by :func:`result_path`. See :func:`result_path` Examples + for how to access the ``request`` fixture. + cubes : + Either a Cube or a sequence of Cubes. + reference_filename : optional, default=None + The relative path (relative to the test results directory). + If omitted, the result is generated from the calling + method's name, class, and module using + :meth:`iris.tests.IrisTest.result_path`. + checksum : bool, optional + When True, causes the CML to include a checksum for each + Cube's data. Defaults to True. 
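+
+    Examples
+    --------
+    A minimal sketch (the cube creation and the reference path are illustrative
+    only)::
+
+        def test_my_analysis(request):
+            cube = make_test_cube()  # any Cube produced by the test
+            assert_CML(request, cube, ("analysis", "my_analysis.cml"))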
+ + """ + _check_for_request_fixture(request, "assert_CML") + + if isinstance(cubes, iris.cube.Cube): + cubes = [cubes] + if reference_filename is None: + reference_filename = result_path(request, None, "cml") + + if isinstance(cubes, (list, tuple)): + xml = iris.cube.CubeList(cubes).xml( + checksum=checksum, order=False, byteorder=False + ) + else: + xml = cubes.xml(checksum=checksum, order=False, byteorder=False) + reference_path = get_result_path(reference_filename) + _check_same(xml, reference_path) + + +def assert_text_file(source_filename, reference_filename, desc="text file"): + """Check if two text files are the same, printing any diffs.""" + with open(source_filename) as source_file: + source_text = source_file.readlines() + with open(reference_filename) as reference_file: + reference_text = reference_file.readlines() + + diff = "".join( + difflib.unified_diff( + reference_text, + source_text, + "Reference", + "Test result", + "", + "", + 0, + ) + ) + fail_string = ( + f"{desc} does not match: reference file " f"{reference_filename} \n {diff}" + ) + assert reference_text == source_text, fail_string + + +def assert_data_almost_equal(data, reference_filename, **kwargs): + reference_path = get_result_path(reference_filename) + if _check_reference_file(reference_path): + kwargs.setdefault("err_msg", "Reference file %s" % reference_path) + with open(reference_path, "r") as reference_file: + stats = json.load(reference_file) + assert stats.get("shape", []) == list(data.shape) + assert stats.get("masked", False) == ma.is_masked(data) + nstats = np.array( + ( + stats.get("mean", 0.0), + stats.get("std", 0.0), + stats.get("max", 0.0), + stats.get("min", 0.0), + ), + dtype=np.float64, + ) + if math.isnan(stats.get("mean", 0.0)): + assert math.isnan(data.mean()) + else: + data_stats = np.array( + (data.mean(), data.std(), data.max(), data.min()), + dtype=np.float64, + ) + assert_array_all_close(nstats, data_stats, **kwargs) + else: + _ensure_folder(reference_path) + stats = collections.OrderedDict( + [ + ("std", np.float64(data.std())), + ("min", np.float64(data.min())), + ("max", np.float64(data.max())), + ("shape", data.shape), + ("masked", ma.is_masked(data)), + ("mean", np.float64(data.mean())), + ] + ) + with open(reference_path, "w") as reference_file: + reference_file.write(json.dumps(stats)) + + +def assert_files_equal(test_filename, reference_filename): + reference_path = get_result_path(reference_filename) + if _check_reference_file(reference_path): + fmt = "test file {!r} does not match reference {!r}." + assert filecmp.cmp(test_filename, reference_path) and fmt.format( + test_filename, reference_path + ) + else: + _ensure_folder(reference_path) + shutil.copy(test_filename, reference_path) + + +def assert_string(request: pytest.FixtureRequest, string, reference_filename=None): + """Test that `string` matches the contents of the reference file. + + If the environment variable IRIS_TEST_CREATE_MISSING is + non-empty, the reference file is created if it doesn't exist. + + Parameters + ---------- + request: pytest.FixtureRequest + A pytest ``request`` fixture passed down from the calling test. Is + required by :func:`result_path`. See :func:`result_path` Examples + for how to access the ``request`` fixture. + string : str + The string to check. + reference_filename : optional, default=None + The relative path (relative to the test results directory). 
+ If omitted, the result is generated from the calling + method's name, class, and module using + :meth:`iris.tests.IrisTest.result_path`. + + """ + _check_for_request_fixture(request, "assert_string") + + if reference_filename is None: + reference_path = result_path(request, None, "txt") + else: + reference_path = get_result_path(reference_filename) + _check_same(string, reference_path, type_comparison_name="Strings") + + +def assert_repr(request: pytest.FixtureRequest, obj, reference_filename): + assert_string(request, repr(obj), reference_filename) + + +def _check_same(item, reference_path, type_comparison_name="CML"): + if _check_reference_file(reference_path): + with open(reference_path, "rb") as reference_fh: + reference = "".join(part.decode("utf-8") for part in reference_fh) + _assert_str_same(reference, item, reference_path, type_comparison_name) + else: + _ensure_folder(reference_path) + with open(reference_path, "wb") as reference_fh: + reference_fh.writelines(part.encode("utf-8") for part in item) + + +def assert_XML_element(obj, reference_filename): + """Calls the xml_element method given obj and asserts the result is the same as the test file.""" + doc = xml.dom.minidom.Document() + doc.appendChild(obj.xml_element(doc)) + # sort the attributes on xml elements before testing against known good state. + # this is to be compatible with stored test output where xml attrs are stored in alphabetical order, + # (which was default behaviour in python <3.8, but changed to insert order in >3.8) + doc = iris.cube.Cube._sort_xml_attrs(doc) + pretty_xml = doc.toprettyxml(indent=" ") + reference_path = get_result_path(reference_filename) + _check_same(pretty_xml, reference_path, type_comparison_name="XML") + + +def assert_array_equal(a, b, err_msg=""): + np.testing.assert_array_equal(a, b, err_msg=err_msg) + + +@contextlib.contextmanager +def _record_warning_matches(expected_regexp=""): + # Record warnings raised matching a given expression. + matches = [] + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + yield matches + messages = [str(warning.message) for warning in w] + expr = re.compile(expected_regexp) + matches.extend(message for message in messages if expr.search(message)) + + +@contextlib.contextmanager +def assert_logs(caplog, logger=None, level=None, msg_regex=None): + """If msg_regex is used, checks that the result is a single message of the specified + level, and that it matches this regex. + + Checks that there is at least one message logged at the given parameters, + but then *also* exercises the message formatters of all the logger's handlers, + just to check that there are no formatting errors. + + """ + with caplog.at_level(level, logger.name): + assert len(caplog.records) != 0 + # Check for any formatting errors by running all the formatters. + for record in caplog.records: + for handler in caplog.logger.handlers: + handler.format(record) + + # Check message, if requested. + if msg_regex: + assert len(caplog.records) == 1 + rec = caplog.records[0] + assert level == rec.levelname + assert re.match(msg_regex, rec.msg) + + +@contextlib.contextmanager +def assert_no_warnings_regexp(expected_regexp=""): + # Check that no warning matching the given expression is raised. + with _record_warning_matches(expected_regexp) as matches: + yield + + msg = "Unexpected warning(s) raised, matching '{}' : {!r}." 
+ msg = msg.format(expected_regexp, matches) + assert not matches, msg + + +def assert_array_almost_equal(a, b, decimal=6): + np.testing.assert_array_almost_equal(a, b, decimal=decimal) + + +def assert_array_all_close(a, b, rtol=1.0e-7, atol=1.0e-8, **kwargs): + """Check arrays are equal, within given relative + absolute tolerances. + + Parameters + ---------- + a, b : array-like + Two arrays to compare. + rtol, atol : float, optional + Relative and absolute tolerances to apply. + + Other Parameters + ---------------- + Any additional kwargs are passed to numpy.testing.assert_allclose. + + Performs pointwise toleranced comparison, and raises an assertion if + the two are not equal 'near enough'. + For full details see underlying routine numpy.allclose. + + """ + # Handle the 'err_msg' kwarg, which is the only API difference + # between np.allclose and np.testing_assert_allclose. + msg = kwargs.pop("err_msg", None) + ok = np.allclose(a, b, rtol=rtol, atol=atol, **kwargs) + if not ok: + # Calculate errors above a pointwise tolerance : The method is + # taken from "numpy.core.numeric.isclose". + a, b = np.broadcast_arrays(a, b) + errors = np.abs(a - b) - atol + rtol * np.abs(b) + worst_inds = np.unravel_index(np.argmax(errors.flat), errors.shape) + + if msg is None: + # Build a more useful message than np.testing.assert_allclose. + msg = ( + '\nARRAY CHECK FAILED "assert_array_all_close" :' + "\n with shapes={} {}, atol={}, rtol={}" + "\n worst at element {} : a={} b={}" + "\n absolute error ~{:.3g}, equivalent to rtol ~{:.3e}" + ) + aval, bval = a[worst_inds], b[worst_inds] + absdiff = np.abs(aval - bval) + equiv_rtol = absdiff / bval + msg = msg.format( + a.shape, + b.shape, + atol, + rtol, + worst_inds, + aval, + bval, + absdiff, + equiv_rtol, + ) + + raise AssertionError(msg) + + +def file_checksum(file_path): + """Generate checksum from file.""" + with open(file_path, "rb") as in_file: + return zlib.crc32(in_file.read()) + + +def _check_reference_file(reference_path): + reference_exists = os.path.isfile(reference_path) + if not (reference_exists or os.environ.get("IRIS_TEST_CREATE_MISSING")): + msg = "Missing test result: {}".format(reference_path) + raise AssertionError(msg) + return reference_exists + + +def _ensure_folder(path): + dir_path = os.path.dirname(path) + if not os.path.exists(dir_path): + os.makedirs(dir_path) + + +# todo: relied on unitest functionality, need to find a pytest alternative +def patch(*args, **kwargs): + """Install a mock.patch, to be removed after the current test. + + The patch is created with mock.patch(*args, **kwargs). + + Returns + ------- + The substitute object returned by patch.start(). + + Examples + -------- + :: + + mock_call = self.patch('module.Class.call', return_value=1) + module_Class_instance.call(3, 4) + self.assertEqual(mock_call.call_args_list, [mock.call(3, 4)]) + + """ + raise NotImplementedError() + + +def assert_array_shape_stats(result, shape, mean, std_dev, rtol=1e-6): + """Assert that the result, a cube, has the provided shape and that the + mean and standard deviation of the data array are also as provided. + Thus build confidence that a cube processing operation, such as a + cube.regrid, has maintained its behaviour. + + """ + assert result.shape == shape + assert_array_all_close(result.data.mean(), mean, rtol=rtol) + assert_array_all_close(result.data.std(), std_dev, rtol=rtol) + + +def assert_dict_equal(lhs, rhs): + """Dictionary Comparison. 
+ + This allows us to cope with dictionary comparison where the value of a key + may be a numpy array. + """ + emsg = f"Provided LHS argument is not a 'Mapping', got {type(lhs)}." + assert isinstance(lhs, Mapping), emsg + + emsg = f"Provided RHS argument is not a 'Mapping', got {type(rhs)}." + assert isinstance(rhs, Mapping), emsg + + emsg = f"{lhs!r} != {rhs!r}." + assert set(lhs.keys()) == set(rhs.keys()), emsg + + for key in lhs.keys(): + lvalue, rvalue = lhs[key], rhs[key] + + if ma.isMaskedArray(lvalue) or ma.isMaskedArray(rvalue): + if not ma.isMaskedArray(lvalue): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {type(lvalue)} and " + f"the RHS value has type {ma.core.MaskedArray}." + ) + raise AssertionError(emsg) + + if not ma.isMaskedArray(rvalue): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {ma.core.MaskedArray} and " + f"the RHS value has type {type(lvalue)}." + ) + raise AssertionError(emsg) + + assert_masked_array_equal(lvalue, rvalue) + elif isinstance(lvalue, np.ndarray) or isinstance(rvalue, np.ndarray): + if not isinstance(lvalue, np.ndarray): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {type(lvalue)} and " + f"the RHS value has type {np.ndarray}." + ) + raise AssertionError(emsg) + + if not isinstance(rvalue, np.ndarray): + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"the LHS value has type {np.ndarray} and " + f"the RHS value has type {type(rvalue)}." + ) + raise AssertionError(emsg) + + assert_array_equal(lvalue, rvalue) + else: + if lvalue != rvalue: + emsg = ( + f"Dictionary key {key!r} values are not equal, " + f"{lvalue!r} != {rvalue!r}." + ) + raise AssertionError(emsg) + + +def assert_equal_and_kind(value, expected): + # Check a value, and also its type 'kind' = float/integer/string. + assert value == expected + assert np.array(value).dtype.kind == np.array(expected).dtype.kind + + +@contextlib.contextmanager +def pp_cube_save_test( + reference_txt_path, + reference_cubes=None, + reference_pp_path=None, + **kwargs, +): + """A context manager for testing the saving of Cubes to PP files. + + Args: + + * reference_txt_path: + The path of the file containing the textual PP reference data. + + Kwargs: + + * reference_cubes: + The cube(s) from which the textual PP reference can be re-built if necessary. + * reference_pp_path: + The location of a PP file from which the textual PP reference can be re-built if necessary. + NB. The "reference_cubes" argument takes precedence over this argument. + + The return value from the context manager is the name of a temporary file + into which the PP data to be tested should be saved. 
+ + Example:: + with self.pp_cube_save_test(reference_txt_path, reference_cubes=cubes) as temp_pp_path: + iris.save(cubes, temp_pp_path) + + """ + + def _create_reference_txt(txt_path, pp_path): + # Load the reference data + pp_fields = list(iris.fileformats.pp.load(pp_path)) + for pp_field in pp_fields: + pp_field.data + + # Clear any header words we don't use + unused = ("lbexp", "lbegin", "lbnrec", "lbproj", "lbtyp") + for pp_field in pp_fields: + for word_name in unused: + setattr(pp_field, word_name, 0) + + # Save the textual representation of the PP fields + with open(txt_path, "w") as txt_file: + txt_file.writelines(str(pp_fields)) + + # Watch out for a missing reference text file + if not os.path.isfile(reference_txt_path): + if reference_cubes: + temp_pp_path = iris.util.create_temp_filename(".pp") + try: + iris.save(reference_cubes, temp_pp_path, **kwargs) + _create_reference_txt(reference_txt_path, temp_pp_path) + finally: + os.remove(temp_pp_path) + elif reference_pp_path: + _create_reference_txt(reference_txt_path, reference_pp_path) + else: + raise ValueError("Missing all of reference txt file, cubes, and PP path.") + + temp_pp_path = iris.util.create_temp_filename(".pp") + try: + # This value is returned to the target of the "with" statement's "as" clause. + yield temp_pp_path + + # Load deferred data for all of the fields (but don't do anything with it) + pp_fields = list(iris.fileformats.pp.load(temp_pp_path)) + for pp_field in pp_fields: + pp_field.data + with open(reference_txt_path, "r") as reference_fh: + reference = "".join(reference_fh) + _assert_str_same( + reference + "\n", + str(pp_fields) + "\n", + reference_txt_path, + type_comparison_name="PP files", + ) + finally: + os.remove(temp_pp_path) + + +def skip_data(fn): + """Decorator to choose whether to run tests, based on the availability of + external data. + + Example usage: + @skip_data + class MyDataTests(tests.IrisTest): + ... + + """ + no_data = ( + not iris.config.TEST_DATA_DIR + or not os.path.isdir(iris.config.TEST_DATA_DIR) + or os.environ.get("IRIS_TEST_NO_DATA") + ) + + skip = pytest.mark.skipif( + condition=no_data, reason="Test(s) require external data." + ) + + return skip(fn) + + +def skip_gdal(fn): + """Decorator to choose whether to run tests, based on the availability of the + GDAL library. + + Example usage: + @skip_gdal + class MyGeoTiffTests(test.IrisTest): + ... + + """ + skip = pytest.mark.skipif( + condition=not GDAL_AVAILABLE, reason="Test requires 'gdal'." + ) + return skip(fn) + + +skip_plot = graphics.skip_plot + +skip_sample_data = pytest.mark.skipif( + not SAMPLE_DATA_AVAILABLE, + reason=('Test(s) require "iris-sample-data", ' "which is not available."), +) + + +skip_nc_time_axis = pytest.mark.skipif( + not NC_TIME_AXIS_AVAILABLE, + reason='Test(s) require "nc_time_axis", which is not available.', +) + + +skip_inet = pytest.mark.skipif( + not INET_AVAILABLE, + reason=('Test(s) require an "internet connection", ' "which is not available."), +) + + +skip_stratify = pytest.mark.skipif( + not STRATIFY_AVAILABLE, + reason='Test(s) require "python-stratify", which is not available.', +) + + +def no_warnings(func): + """Provides a decorator to ensure that there are no warnings raised + within the test, otherwise the test will fail. 
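+
+    A minimal usage sketch (the test name is illustrative only)::
+
+        @no_warnings
+        def test_is_quiet():
+            ...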
+ + """ + + @functools.wraps(func) + def wrapped(*args, **kwargs): + with pytest.mock.patch("warnings.warn") as warn: + result = func(*args, **kwargs) + assert 0 == warn.call_count, "Got unexpected warnings.\n{}".format( + warn.call_args_list + ) + return result + + return wrapped + + +def env_bin_path(exe_name: Optional[str] = None): + """Return a Path object for (an executable in) the environment bin directory. + + Parameters + ---------- + exe_name : str + If set, the name of an executable to append to the path. + + Returns + ------- + exe_path : Path + A path to the bin directory, or an executable file within it. + + Notes + ----- + For use in tests which spawn commands which should call executables within + the Python environment, since many IDEs (Eclipse, PyCharm) don't + automatically include this location in $PATH (as opposed to $PYTHONPATH). + """ + exe_path = Path(os.__file__) + exe_path = (exe_path / "../../../bin").resolve() + if exe_name is not None: + exe_path = exe_path / exe_name + return exe_path + + +class GraphicsTest: + """All inheriting classes automatically have access to ``self.check_graphic()``.""" + + @pytest.fixture(autouse=True) + def _get_check_graphics(self, check_graphic_caller): + self.check_graphic = check_graphic_caller diff --git a/lib/iris/tests/conftest.py b/lib/iris/tests/conftest.py new file mode 100644 index 0000000000..2a3341d8c0 --- /dev/null +++ b/lib/iris/tests/conftest.py @@ -0,0 +1,55 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Top-level fixture infra-structure. + +Before adding to this: consider if :mod:`iris.tests.unit.conftest` or +:mod:`iris.tests.integration.conftest` might be more appropriate. +""" + +from collections import defaultdict +from typing import Callable + +import pytest + +import iris.tests.graphics + + +@pytest.fixture(scope="session", autouse=True) +def test_call_counter(): + """Provide a session-persistent tracker of the number of calls per test name. + + Used by :func:`_unique_id` to ensure uniqueness if called multiple times + per test. + """ + counter = defaultdict(int) + return counter + + +@pytest.fixture +def _unique_id(request: pytest.FixtureRequest, test_call_counter) -> Callable: + """Provide a function returning a unique ID of calling test and call number. + + Example: ``iris.tests.unit.test_cube.TestCube.test_data.my_param.0`` + + Used by :func:`iris.tests.graphics.check_graphic_caller` to ensure unique + image names. + """ + id_sequence = [request.module.__name__, request.node.originalname] + if request.cls is not None: + id_sequence.insert(-1, request.cls.__name__) + if hasattr(request.node, "callspec"): + id_sequence.append(request.node.callspec.id) + test_id = ".".join(id_sequence) + + def generate_id(): + assertion_id = test_call_counter[test_id] + test_call_counter[test_id] += 1 + return f"{test_id}.{assertion_id}" + + return generate_id + + +# Share this existing fixture from the expected location. +check_graphic_caller = iris.tests.graphics._check_graphic_caller diff --git a/lib/iris/tests/graphics/README.md b/lib/iris/tests/graphics/README.md index 069fc01f70..b345843109 100644 --- a/lib/iris/tests/graphics/README.md +++ b/lib/iris/tests/graphics/README.md @@ -24,8 +24,9 @@ perceived as it may be a simple pixel shift. 
## Testing Strategy -The `iris.tests.IrisTest.check_graphic` test routine calls out to -`iris.tests.graphics.check_graphic` which tests against the **acceptable** +The `iris.tests.graphics.check_graphic` function - accessed via the +`check_graphic_caller` fixture (PyTest) or `iris.tests.IrisTest.check_graphic` +(unittest) - tests against the **acceptable** result. It does this using an image **hash** comparison technique which allows us to be robust against minor variations based on underlying library updates. @@ -48,4 +49,4 @@ This consists of: * The utility script `iris/tests/idiff.py` automates checking, enabling the developer to easily compare the proposed new **acceptable** result image - against the existing accepted baseline image, for each failing test. \ No newline at end of file + against the existing accepted baseline image, for each failing test. diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 1fe199c8b7..7fb2074ca0 100644 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -18,10 +18,10 @@ from pathlib import Path import sys import threading -from typing import Callable, Dict, Union -import unittest +from typing import Callable, Dict, Iterator, Union import filelock +import pytest # Test for availability of matplotlib. # (And remove matplotlib as an iris.tests dependency.) @@ -50,7 +50,7 @@ _DISPLAY_FIGURES = True # Threading non re-entrant blocking lock to ensure thread-safe plotting in the -# GraphicsTestMixin. +# GraphicsTestMixin and check_graphics_caller. _lock = threading.Lock() #: Default perceptual hash size. @@ -241,6 +241,7 @@ def _create_missing(phash: str) -> None: class GraphicsTestMixin: + # TODO: deprecate this in favour of check_graphic_caller. def setUp(self) -> None: # Acquire threading non re-entrant blocking lock to ensure # thread-safe plotting. @@ -263,15 +264,57 @@ def skip_plot(fn: Callable) -> Callable: """Decorator to choose whether to run tests, based on the availability of the matplotlib library. - Example usage: - @skip_plot - class MyPlotTests(test.GraphicsTest): - ... + Examples + -------- + >>> @skip_plot + >>> class TestMyPlots: + ... def test_my_plot(self, check_graphic_caller): + ... pass + ... + >>> @skip_plot + >>> def test_my_plot(check_graphic_caller): + ... pass """ - skip = unittest.skipIf( + skip = pytest.mark.skipIf( condition=not MPL_AVAILABLE, reason="Graphics tests require the matplotlib library.", ) return skip(fn) + + +@pytest.fixture +def _check_graphic_caller(_unique_id) -> Iterator[Callable]: + """Provide a function calling :func:`check_graphic` with safe configuration. + + Ensures a safe Matplotlib setup (and tears down afterwards), and generates + a unique test id for each call. + + Examples + -------- + >>> def test_my_plot(check_graphic_caller): + ... # ... do some plotting ... + ... check_graphic_caller() + """ + from iris.tests import _RESULT_PATH + + # Acquire threading non re-entrant blocking lock to ensure + # thread-safe plotting. + _lock.acquire() + # Make sure we have no unclosed plots from previous tests before + # generating this one. + if MPL_AVAILABLE: + plt.close("all") + + def call_check_graphic(): + check_graphic(_unique_id(), _RESULT_PATH) + + yield call_check_graphic + + # If a plotting test bombs out it can leave the current figure + # in an odd state, so we make sure it's been disposed of. + if MPL_AVAILABLE: + plt.close("all") + # Release the non re-entrant blocking lock. 
+ _lock.release() diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py index 64d690e55d..cbd9d3b891 100755 --- a/lib/iris/tests/graphics/idiff.py +++ b/lib/iris/tests/graphics/idiff.py @@ -28,6 +28,7 @@ from iris.warnings import IrisIgnoringWarning # noqa import iris.tests # noqa +from iris.tests import _shared_utils import iris.tests.graphics as graphics # noqa # Allows restoration of test id from result image name @@ -118,7 +119,7 @@ def step_over_diffs(result_dir, display=True): for fname in result_dir.glob(f"*{_POSTFIX_DIFF}"): fname.unlink() - reference_image_dir = Path(iris.tests.get_data_path("images")) + reference_image_dir = Path(_shared_utils.get_data_path("images")) repo = graphics.read_repo_json() # Filter out all non-test result image files. diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py index 5261f0cc29..ca2f65279f 100755 --- a/lib/iris/tests/graphics/recreate_imagerepo.py +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -10,7 +10,7 @@ from imagehash import hex_to_hash -import iris.tests +from iris.tests import _shared_utils import iris.tests.graphics as graphics @@ -47,7 +47,7 @@ def update_json(baseline_image_dir: Path, dry_run: bool = False): if __name__ == "__main__": - default_baseline_image_dir = Path(iris.tests.IrisTest.get_data_path("images")) + default_baseline_image_dir = Path(_shared_utils.get_data_path("images")) description = ( "Update imagerepo.json based on contents of the baseline image directory" ) diff --git a/lib/iris/tests/integration/plot/test_animate.py b/lib/iris/tests/integration/plot/test_animate.py index 4afee0c463..59f269e9f7 100644 --- a/lib/iris/tests/integration/plot/test_animate.py +++ b/lib/iris/tests/integration/plot/test_animate.py @@ -4,24 +4,22 @@ # See LICENSE in the root of the repository for full licensing details. """Integration tests for :func:`iris.plot.animate`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np +import pytest import iris from iris.coord_systems import GeogCS +from iris.tests import _shared_utils # Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot -class IntegrationTest(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_plot +class IntegrationTest(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): cube = iris.cube.Cube(np.arange(36, dtype=np.int32).reshape((3, 3, 4))) cs = GeogCS(6371229) @@ -68,7 +66,3 @@ def test_cube_animation(self): for anim, d in zip(ani, data): anim._draw_next_frame(d, blit=False) self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index 4a3fd27a80..e02e51db78 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -7,26 +7,24 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.coords import AuxCoord +from iris.tests import _shared_utils import iris.tests.stock # Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import matplotlib.pyplot as plt from iris.plot import contour, contourf, pcolor, pcolormesh, points, scatter -@tests.skip_plot -class TestColorBarCreation(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_plot +class TestColorBarCreation(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.draw_functions = (contour, contourf, pcolormesh, pcolor) self.cube = iris.tests.stock.lat_lon_cube() self.cube.coord("longitude").guess_bounds() @@ -46,49 +44,37 @@ def test_common_draw_functions(self): for draw_function in self.draw_functions: mappable = draw_function(self.cube) cbar = plt.colorbar() - self.assertIs( - cbar.mappable, - mappable, - msg="Problem with draw function iris.plot.{}".format( - draw_function.__name__ - ), - ) + assert ( + cbar.mappable is mappable + ), "Problem with draw function iris.plot.{}".format(draw_function.__name__) def test_common_draw_functions_specified_mappable(self): for draw_function in self.draw_functions: mappable_initial = draw_function(self.cube, cmap="cool") _ = draw_function(self.cube) cbar = plt.colorbar(mappable_initial) - self.assertIs( - cbar.mappable, - mappable_initial, - msg="Problem with draw function iris.plot.{}".format( - draw_function.__name__ - ), - ) + assert ( + cbar.mappable is mappable_initial + ), "Problem with draw function iris.plot.{}".format(draw_function.__name__) def test_points_with_c_kwarg(self): mappable = points(self.cube, c=self.cube.data) cbar = plt.colorbar() - self.assertIs(cbar.mappable, mappable) + assert cbar.mappable is mappable def test_points_with_c_kwarg_specified_mappable(self): mappable_initial = points(self.cube, c=self.cube.data, cmap="cool") _ = points(self.cube, c=self.cube.data) cbar = plt.colorbar(mappable_initial) - self.assertIs(cbar.mappable, mappable_initial) + assert cbar.mappable is mappable_initial def test_scatter_with_c_kwarg(self): mappable = scatter(self.traj_lon, self.traj_lat, c=self.traj_lon.points) cbar = plt.colorbar() - self.assertIs(cbar.mappable, mappable) + assert cbar.mappable is mappable def test_scatter_with_c_kwarg_specified_mappable(self): mappable_initial = scatter(self.traj_lon, self.traj_lat, c=self.traj_lon.points) _ = scatter(self.traj_lon, self.traj_lat, c=self.traj_lon.points, cmap="cool") cbar = plt.colorbar(mappable_initial) - self.assertIs(cbar.mappable, mappable_initial) - - -if __name__ == "__main__": - tests.main() + assert cbar.mappable is mappable_initial diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py index 750de9fdf3..408c04cb3b 100644 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ b/lib/iris/tests/integration/plot/test_netcdftime.py @@ -4,24 +4,21 @@ # See LICENSE in the root of the repository for full licensing details. """Test plot of time coord with non-standard calendar.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - from cf_units import Unit import cftime import numpy as np from iris.coords import AuxCoord +from iris.tests import _shared_utils # Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_nc_time_axis -@tests.skip_plot -class Test(tests.GraphicsTest): +@_shared_utils.skip_nc_time_axis +@_shared_utils.skip_plot +class Test(_shared_utils.GraphicsTest): def test_360_day_calendar(self): n = 360 calendar = "360_day" @@ -44,8 +41,4 @@ def test_360_day_calendar(self): expected_ydata = times (line1,) = iplt.plot(time_coord) result_ydata = line1.get_ydata() - self.assertArrayEqual(expected_ydata, result_ydata) - - -if __name__ == "__main__": - tests.main() + _shared_utils.assert_array_equal(expected_ydata, result_ydata) diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py index cb119f5b27..6e2241863e 100644 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ b/lib/iris/tests/integration/plot/test_nzdateline.py @@ -4,33 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Test set up of limited area map extents which bridge the date line.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import iris +from iris.tests import _shared_utils # Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import matplotlib.pyplot as plt from iris.plot import pcolormesh -@tests.skip_plot -@tests.skip_data -class TestExtent(tests.IrisTest): +@_shared_utils.skip_plot +@_shared_utils.skip_data +class TestExtent: def test_dateline(self): - dpath = tests.get_data_path(["PP", "nzgust.pp"]) + dpath = _shared_utils.get_data_path(["PP", "nzgust.pp"]) cube = iris.load_cube(dpath) pcolormesh(cube) # Ensure that the limited area expected for NZ is set. # This is set in longitudes with the datum set to the # International Date Line. - self.assertTrue( - -10 < plt.gca().get_xlim()[0] < -5 and 5 < plt.gca().get_xlim()[1] < 10 - ) - - -if __name__ == "__main__": - tests.main() + assert -10 < plt.gca().get_xlim()[0] < -5 + assert 5 < plt.gca().get_xlim()[1] < 10 diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py index 43cd051f46..f4e23fad7b 100644 --- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py +++ b/lib/iris/tests/integration/plot/test_plot_2d_coords.py @@ -4,10 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test plots with two dimensional coordinates.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import cartopy.crs as ccrs import matplotlib.pyplot as plt import numpy as np @@ -16,22 +12,23 @@ from iris.analysis.cartography import unrotate_pole from iris.coords import AuxCoord from iris.cube import Cube +from iris.tests import _shared_utils # Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_data +@_shared_utils.skip_data def simple_cube_w_2d_coords(): - path = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) + path = _shared_utils.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) cube = iris.load_cube(path) return cube -@tests.skip_plot -@tests.skip_data -class Test(tests.GraphicsTest): +@_shared_utils.skip_plot +@_shared_utils.skip_data +class Test(_shared_utils.GraphicsTest): def test_2d_coord_bounds_platecarree(self): # To avoid a problem with Cartopy smearing the data where the # longitude wraps, we set the central_longitude. @@ -56,8 +53,8 @@ def test_2d_coord_bounds_northpolarstereo(self): self.check_graphic() -@tests.skip_plot -class Test2dContour(tests.GraphicsTest): +@_shared_utils.skip_plot +class Test2dContour(_shared_utils.GraphicsTest): def test_2d_coords_contour(self): ny, nx = 4, 6 x1 = np.linspace(-20, 70, nx) @@ -77,7 +74,3 @@ def test_2d_coords_contour(self): ax.gridlines(draw_labels=True) ax.set_extent((0, 180, 0, 90)) self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py index 5419dc182f..0f8ac11a2d 100644 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ b/lib/iris/tests/integration/plot/test_vector_plots.py @@ -4,26 +4,24 @@ # See LICENSE in the root of the repository for full licensing details. """Test some key usages of :func:`iris.plot.quiver`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import cartopy.crs as ccrs import numpy as np +import pytest from iris.coord_systems import Mercator from iris.coords import AuxCoord, DimCoord from iris.cube import Cube +from iris.tests import _shared_utils from iris.tests.stock import sample_2d_latlons # Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import matplotlib.pyplot as plt from iris.plot import barbs, quiver -@tests.skip_plot +@_shared_utils.skip_plot class MixinVectorPlotCases: """Test examples mixin, used by separate barb, quiver + streamplot classes. 
@@ -147,7 +145,7 @@ def test_fail_unsupported_coord_system(self): r"Can only plot .* lat-lon projection, .* " r"This .* translates as Cartopy \+proj=merc .*" ) - with self.assertRaisesRegex(ValueError, re_msg): + with pytest.raises(ValueError, match=re_msg): self.plot("2d_rotated", u_cube, v_cube, coords=("longitude", "latitude")) def test_circular_longitude(self): @@ -178,10 +176,7 @@ def test_circular_longitude(self): self.plot("circular", u_cube, v_cube, coords=("longitude", "latitude")) -class TestBarbs(MixinVectorPlotCases, tests.GraphicsTest): - def setUp(self): - super().setUp() - +class TestBarbs(MixinVectorPlotCases, _shared_utils.GraphicsTest): @staticmethod def _nonlatlon_xyuv(): # Increase the range of wind speeds used in the barbs test to test more @@ -206,13 +201,6 @@ def plot_function_to_test(self): return barbs -class TestQuiver(MixinVectorPlotCases, tests.GraphicsTest): - def setUp(self): - super().setUp() - +class TestQuiver(MixinVectorPlotCases, _shared_utils.GraphicsTest): def plot_function_to_test(self): return quiver - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml b/lib/iris/tests/results/unit/util/mask_cube/CubeMask/mask_cube_2d_create_new_dim.cml similarity index 100% rename from lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml rename to lib/iris/tests/results/unit/util/mask_cube/CubeMask/mask_cube_2d_create_new_dim.cml diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index 60a9018c09..b0a90ccdaf 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ -3,23 +3,22 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import unittest - import numpy as np import numpy.ma as ma +import pytest import iris import iris.analysis import iris.coord_systems import iris.coords +from iris.tests import _shared_utils -class TestAggregateBy(tests.IrisTest): - def setUp(self): +class TestAggregateBy: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + # # common # @@ -366,47 +365,67 @@ def setUp(self): def test_single(self): # mean group-by with single coordinate name. aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.MEAN) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single.cml") + ) # mean group-by with single coordinate. aggregateby_cube = self.cube_single.aggregated_by( self.coord_z_single, iris.analysis.MEAN ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single.cml") + ) - np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.single_expected + ) # rms group-by with single coordinate name. 
aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.RMS) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "single_rms.cml") + _shared_utils.assert_CML( + self.request, + aggregateby_cube, + ("analysis", "aggregated_by", "single_rms.cml"), ) # rms group-by with single coordinate. aggregateby_cube = self.cube_single.aggregated_by( self.coord_z_single, iris.analysis.RMS ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "single_rms.cml") + _shared_utils.assert_CML( + self.request, + aggregateby_cube, + ("analysis", "aggregated_by", "single_rms.cml"), ) - np.testing.assert_almost_equal(aggregateby_cube.data, self.single_rms_expected) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.single_rms_expected + ) def test_str_aggregation_single_weights_none(self): # mean group-by with single coordinate name. aggregateby_cube = self.cube_single.aggregated_by( "height", iris.analysis.MEAN, weights=None ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) - np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single.cml") + ) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.single_expected + ) def test_coord_aggregation_single_weights_none(self): # mean group-by with single coordinate. aggregateby_cube = self.cube_single.aggregated_by( self.coord_z_single, iris.analysis.MEAN, weights=None ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "single.cml")) - np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single.cml") + ) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.single_expected + ) def test_weighted_single(self): # weighted mean group-by with single coordinate name. @@ -416,7 +435,8 @@ def test_weighted_single(self): weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_single.cml"), ) @@ -427,11 +447,12 @@ def test_weighted_single(self): iris.analysis.MEAN, weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_single.cml"), ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, self.weighted_single_expected, ) @@ -443,7 +464,8 @@ def test_single_shared(self): # group-by with single coordinate name on shared axis. 
aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.MEAN) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single_shared.cml"), ) @@ -452,12 +474,15 @@ def test_single_shared(self): aggregateby_cube = self.cube_single.aggregated_by( self.coord_z_single, iris.analysis.MEAN ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single_shared.cml"), ) - np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.single_expected + ) def test_weighted_single_shared(self): z2_points = np.arange(36, dtype=np.int32) @@ -470,7 +495,8 @@ def test_weighted_single_shared(self): iris.analysis.MEAN, weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_single_shared.cml"), ) @@ -481,11 +507,12 @@ def test_weighted_single_shared(self): iris.analysis.MEAN, weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_single_shared.cml"), ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, self.weighted_single_expected ) @@ -498,7 +525,8 @@ def test_single_shared_circular(self): # group-by with single coordinate name on shared axis. aggregateby_cube = self.cube_single.aggregated_by("height", iris.analysis.MEAN) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single_shared_circular.cml"), ) @@ -506,11 +534,14 @@ def test_single_shared_circular(self): # group-by with single coordinate on shared axis. coord = self.cube_single.coords("height") aggregateby_cube = self.cube_single.aggregated_by(coord, iris.analysis.MEAN) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single_shared_circular.cml"), ) - np.testing.assert_almost_equal(aggregateby_cube.data, self.single_expected) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.single_expected + ) def test_weighted_single_shared_circular(self): points = np.arange(36) * 10.0 @@ -525,7 +556,8 @@ def test_weighted_single_shared_circular(self): iris.analysis.MEAN, weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ( "analysis", @@ -541,7 +573,8 @@ def test_weighted_single_shared_circular(self): iris.analysis.MEAN, weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ( "analysis", @@ -549,7 +582,7 @@ def test_weighted_single_shared_circular(self): "weighted_single_shared_circular.cml", ), ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, self.weighted_single_expected, ) @@ -559,27 +592,37 @@ def test_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( ["height", "level"], iris.analysis.MEAN ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") + ) # group-by with multiple coordinate names (different order). 
aggregateby_cube = self.cube_multi.aggregated_by( ["level", "height"], iris.analysis.MEAN ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") + ) # group-by with multiple coordinates. aggregateby_cube = self.cube_multi.aggregated_by( [self.coord_z1_multi, self.coord_z2_multi], iris.analysis.MEAN ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") + ) # group-by with multiple coordinates (different order). aggregateby_cube = self.cube_multi.aggregated_by( [self.coord_z2_multi, self.coord_z1_multi], iris.analysis.MEAN ) - self.assertCML(aggregateby_cube, ("analysis", "aggregated_by", "multi.cml")) + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "multi.cml") + ) - np.testing.assert_almost_equal(aggregateby_cube.data, self.multi_expected) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.multi_expected + ) def test_weighted_multi(self): # weighted group-by with multiple coordinate names. @@ -588,7 +631,8 @@ def test_weighted_multi(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi.cml"), ) @@ -599,7 +643,8 @@ def test_weighted_multi(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi.cml"), ) @@ -610,7 +655,8 @@ def test_weighted_multi(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi.cml"), ) @@ -621,11 +667,12 @@ def test_weighted_multi(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi.cml"), ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, self.weighted_multi_expected, ) @@ -643,8 +690,10 @@ def test_multi_shared(self): aggregateby_cube = self.cube_multi.aggregated_by( ["height", "level"], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi_shared.cml") + _shared_utils.assert_CML( + self.request, + aggregateby_cube, + ("analysis", "aggregated_by", "multi_shared.cml"), ) # group-by with multiple coordinate names on shared axis (different @@ -652,27 +701,35 @@ def test_multi_shared(self): aggregateby_cube = self.cube_multi.aggregated_by( ["level", "height"], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi_shared.cml") + _shared_utils.assert_CML( + self.request, + aggregateby_cube, + ("analysis", "aggregated_by", "multi_shared.cml"), ) # group-by with multiple coordinates on shared axis. aggregateby_cube = self.cube_multi.aggregated_by( [self.coord_z1_multi, self.coord_z2_multi], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi_shared.cml") + _shared_utils.assert_CML( + self.request, + aggregateby_cube, + ("analysis", "aggregated_by", "multi_shared.cml"), ) # group-by with multiple coordinates on shared axis (different order). 
aggregateby_cube = self.cube_multi.aggregated_by( [self.coord_z2_multi, self.coord_z1_multi], iris.analysis.MEAN ) - self.assertCML( - aggregateby_cube, ("analysis", "aggregated_by", "multi_shared.cml") + _shared_utils.assert_CML( + self.request, + aggregateby_cube, + ("analysis", "aggregated_by", "multi_shared.cml"), ) - np.testing.assert_almost_equal(aggregateby_cube.data, self.multi_expected) + _shared_utils.assert_array_almost_equal( + aggregateby_cube.data, self.multi_expected + ) def test_weighted_multi_shared(self): z3_points = np.arange(20, dtype=np.int32) @@ -689,7 +746,8 @@ def test_weighted_multi_shared(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi_shared.cml"), ) @@ -701,7 +759,8 @@ def test_weighted_multi_shared(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi_shared.cml"), ) @@ -712,7 +771,8 @@ def test_weighted_multi_shared(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi_shared.cml"), ) @@ -724,11 +784,12 @@ def test_weighted_multi_shared(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi_shared.cml"), ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, self.weighted_multi_expected, ) @@ -738,18 +799,19 @@ def test_easy(self): # Easy mean aggregate test by each coordinate. # aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.MEAN) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[8.0, 15.0], [10.0, 17.0], [15.0, 8.0]], dtype=np.float32), ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "easy.cml"), ) aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.MEAN) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[7.0, 11.0, 13.0, 19.0], [18.0, 12.0, 10.0, 6.0]], @@ -761,13 +823,13 @@ def test_easy(self): # Easy max aggregate test by each coordinate. # aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.MAX) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[10.0, 18.0], [12.0, 20.0], [18.0, 10.0]], dtype=np.float32), ) aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.MAX) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[8.0, 12.0, 14.0, 20.0], [18.0, 12.0, 10.0, 6.0]], @@ -779,13 +841,13 @@ def test_easy(self): # Easy sum aggregate test by each coordinate. 
# aggregateby_cube = self.cube_easy.aggregated_by("longitude", iris.analysis.SUM) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[16.0, 30.0], [20.0, 34.0], [30.0, 16.0]], dtype=np.float32), ) aggregateby_cube = self.cube_easy.aggregated_by("latitude", iris.analysis.SUM) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[14.0, 22.0, 26.0, 38.0], [18.0, 12.0, 10.0, 6.0]], @@ -799,7 +861,7 @@ def test_easy(self): aggregateby_cube = self.cube_easy.aggregated_by( "longitude", iris.analysis.PERCENTILE, percent=25 ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[7.0, 13.5], [9.0, 15.5], [13.5, 7.0]], dtype=np.float32), ) @@ -807,7 +869,7 @@ def test_easy(self): aggregateby_cube = self.cube_easy.aggregated_by( "latitude", iris.analysis.PERCENTILE, percent=25 ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[6.5, 10.5, 12.5, 18.5], [18.0, 12.0, 10.0, 6.0]], @@ -824,7 +886,7 @@ def test_easy(self): list(np.sqrt([104.0, 298.0])), list(np.sqrt([234.0, 68.0])), ] - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array(row, dtype=np.float32) ) @@ -833,7 +895,7 @@ def test_easy(self): list(np.sqrt([50.0, 122.0, 170.0, 362.0])), [18.0, 12.0, 10.0, 6.0], ] - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array(row, dtype=np.float32) ) @@ -852,11 +914,12 @@ def test_weighted_easy(self): "longitude", iris.analysis.MEAN, weights=lon_weights ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[3.0, 8.0], [0.2, 4.0]], dtype=np.float32), ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_easy.cml"), ) @@ -866,7 +929,7 @@ def test_weighted_easy(self): iris.analysis.MEAN, weights=lat_weights, ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[3.0, 5.0, 7.0, 9.0], [0.0, 2.0, 4.0, 6.0]], @@ -880,7 +943,7 @@ def test_weighted_easy(self): aggregateby_cube = self.cube_easy_weighted.aggregated_by( "longitude", iris.analysis.SUM, weights=lon_weights ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[3.0, 16.0], [2.0, 8.0]], dtype=np.float32), ) @@ -890,7 +953,7 @@ def test_weighted_easy(self): iris.analysis.SUM, weights=lat_weights, ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[6.0, 10.0, 14.0, 18.0], [0.0, 4.0, 8.0, 12.0]], @@ -909,7 +972,7 @@ def test_weighted_easy(self): percent=50, weights=lon_weights, ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[3.0, 8.0], [0.2, 4.0]], dtype=np.float32), ) @@ -920,7 +983,7 @@ def test_weighted_easy(self): aggregateby_cube = self.cube_easy_weighted.aggregated_by( "longitude", iris.analysis.RMS, weights=lon_weights ) - np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array([[3.0, np.sqrt(65.0)], [np.sqrt(0.4), 4.0]], dtype=np.float32), ) @@ -928,7 +991,7 @@ def test_weighted_easy(self): aggregateby_cube = self.cube_easy_weighted.aggregated_by( "latitude", iris.analysis.RMS, weights=lat_weights ) 
- np.testing.assert_almost_equal( + _shared_utils.assert_array_almost_equal( aggregateby_cube.data, np.array( [[3.0, 5.0, 7.0, 9.0], [0.0, 2.0, 4.0, 6.0]], @@ -986,11 +1049,14 @@ def test_single_missing(self): "height", iris.analysis.MEAN ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "single_missing.cml"), ) - self.assertMaskedArrayAlmostEqual(aggregateby_cube.data, single_expected) + _shared_utils.assert_masked_array_almost_equal( + aggregateby_cube.data, single_expected + ) def test_weighted_single_missing(self): # weighted aggregation correctly handles masked data @@ -1044,11 +1110,12 @@ def test_weighted_single_missing(self): weights=self.weights_single, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_single_missing.cml"), ) - self.assertMaskedArrayAlmostEqual( + _shared_utils.assert_masked_array_almost_equal( aggregateby_cube.data, weighted_single_expected, ) @@ -1108,11 +1175,14 @@ def test_multi_missing(self): ["height", "level"], iris.analysis.MEAN ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "multi_missing.cml"), ) - self.assertMaskedArrayAlmostEqual(aggregateby_cube.data, multi_expected) + _shared_utils.assert_masked_array_almost_equal( + aggregateby_cube.data, multi_expected + ) def test_weighted_multi_missing(self): # weighted aggregation correctly handles masked data @@ -1170,11 +1240,12 @@ def test_weighted_multi_missing(self): iris.analysis.MEAN, weights=self.weights_multi, ) - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi_missing.cml"), ) - self.assertMaskedArrayAlmostEqual( + _shared_utils.assert_masked_array_almost_equal( aggregateby_cube.data, weighted_multi_expected, ) @@ -1186,10 +1257,11 @@ def test_returned_true_single(self): returned=True, weights=self.weights_single, ) - self.assertTrue(isinstance(aggregateby_output, tuple)) + assert isinstance(aggregateby_output, tuple) aggregateby_cube = aggregateby_output[0] - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_single.cml"), ) @@ -1207,7 +1279,7 @@ def test_returned_true_single(self): [[8.0, 8.0, 8.0], [8.0, 8.0, 8.0], [8.0, 8.0, 8.0]], ] ) - np.testing.assert_almost_equal(aggregateby_weights, expected_weights) + _shared_utils.assert_array_almost_equal(aggregateby_weights, expected_weights) def test_returned_true_multi(self): aggregateby_output = self.cube_multi.aggregated_by( @@ -1216,10 +1288,11 @@ def test_returned_true_multi(self): returned=True, weights=self.weights_multi, ) - self.assertTrue(isinstance(aggregateby_output, tuple)) + assert isinstance(aggregateby_output, tuple) aggregateby_cube = aggregateby_output[0] - self.assertCML( + _shared_utils.assert_CML( + self.request, aggregateby_cube, ("analysis", "aggregated_by", "weighted_multi.cml"), ) @@ -1238,10 +1311,10 @@ def test_returned_true_multi(self): [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], ] ) - np.testing.assert_almost_equal(aggregateby_weights, expected_weights) + _shared_utils.assert_array_almost_equal(aggregateby_weights, expected_weights) def test_returned_fails_with_non_weighted_aggregator(self): - self.assertRaises( + pytest.raises( TypeError, self.cube_single.aggregated_by, "height", @@ -1250,7 +1323,7 @@ def test_returned_fails_with_non_weighted_aggregator(self): ) def 
test_weights_fail_with_non_weighted_aggregator(self): - self.assertRaises( + pytest.raises( TypeError, self.cube_single.aggregated_by, "height", @@ -1267,9 +1340,9 @@ def test_weights_fail_with_non_weighted_aggregator(self): class TestAggregateByWeightedByCube(TestAggregateBy): - def setUp(self): - super().setUp() - + @pytest.fixture(autouse=True) + def _setup_subclass(self, _setup): + # Requests _setup to ensure this fixture runs AFTER _setup. self.weights_single = self.cube_single[:, 0, 0].copy(self.weights_single) self.weights_single.units = "m2" self.weights_multi = self.cube_multi[:, 0, 0].copy(self.weights_multi) @@ -1281,7 +1354,7 @@ def test_str_aggregation_weighted_sum_single(self): iris.analysis.SUM, weights=self.weights_single, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_coord_aggregation_weighted_sum_single(self): aggregateby_cube = self.cube_single.aggregated_by( @@ -1289,7 +1362,7 @@ def test_coord_aggregation_weighted_sum_single(self): iris.analysis.SUM, weights=self.weights_single, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_str_aggregation_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -1297,7 +1370,7 @@ def test_str_aggregation_weighted_sum_multi(self): iris.analysis.SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_str_aggregation_rev_order_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -1305,7 +1378,7 @@ def test_str_aggregation_rev_order_weighted_sum_multi(self): iris.analysis.SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_coord_aggregation_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -1313,7 +1386,7 @@ def test_coord_aggregation_weighted_sum_multi(self): iris.analysis.SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_coord_aggregation_rev_order_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -1321,11 +1394,12 @@ def test_coord_aggregation_rev_order_weighted_sum_multi(self): iris.analysis.SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" -class TestAggregateByWeightedByObj(tests.IrisTest): - def setUp(self): +class TestAggregateByWeightedByObj: + @pytest.fixture(autouse=True) + def _setup(self): self.dim_coord = iris.coords.DimCoord( [0, 1, 2], standard_name="latitude", units="degrees" ) @@ -1352,58 +1426,54 @@ def test_weighting_with_str_dim_coord(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights="latitude" ) - np.testing.assert_array_equal(res_cube.data, [0, 8]) - self.assertEqual(res_cube.units, "K degrees") + _shared_utils.assert_array_equal(res_cube.data, [0, 8]) + assert res_cube.units == "K degrees" def test_weighting_with_str_aux_coord(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights="auxcoord" ) - np.testing.assert_array_equal(res_cube.data, [0, 5]) - self.assertEqual(res_cube.units, "K kg") + _shared_utils.assert_array_equal(res_cube.data, [0, 5]) + assert res_cube.units == "K kg" def test_weighting_with_str_cell_measure(self): res_cube = 
self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights="cell_area" ) - np.testing.assert_array_equal(res_cube.data, [0, 0]) - self.assertEqual(res_cube.units, "K m2") + _shared_utils.assert_array_equal(res_cube.data, [0, 0]) + assert res_cube.units == "K m2" def test_weighting_with_str_ancillary_variable(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights="ancvar" ) - np.testing.assert_array_equal(res_cube.data, [1, 5]) - self.assertEqual(res_cube.units, "K kg") + _shared_utils.assert_array_equal(res_cube.data, [1, 5]) + assert res_cube.units == "K kg" def test_weighting_with_dim_coord(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights=self.dim_coord ) - np.testing.assert_array_equal(res_cube.data, [0, 8]) - self.assertEqual(res_cube.units, "K degrees") + _shared_utils.assert_array_equal(res_cube.data, [0, 8]) + assert res_cube.units == "K degrees" def test_weighting_with_aux_coord(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights=self.aux_coord ) - np.testing.assert_array_equal(res_cube.data, [0, 5]) - self.assertEqual(res_cube.units, "K kg") + _shared_utils.assert_array_equal(res_cube.data, [0, 5]) + assert res_cube.units == "K kg" def test_weighting_with_cell_measure(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights=self.cell_measure ) - np.testing.assert_array_equal(res_cube.data, [0, 0]) - self.assertEqual(res_cube.units, "K m2") + _shared_utils.assert_array_equal(res_cube.data, [0, 0]) + assert res_cube.units == "K m2" def test_weighting_with_ancillary_variable(self): res_cube = self.cube.aggregated_by( "auxcoord", iris.analysis.SUM, weights=self.ancillary_variable ) - np.testing.assert_array_equal(res_cube.data, [1, 5]) - self.assertEqual(res_cube.units, "K kg") - - -if __name__ == "__main__": - unittest.main() + _shared_utils.assert_array_equal(res_cube.data, [1, 5]) + assert res_cube.units == "K kg" diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index a8446034be..9e0bf76d34 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -3,10 +3,6 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
- -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip - import cf_units import dask.array as da import numpy as np @@ -20,12 +16,17 @@ import iris.coord_systems import iris.coords import iris.cube +from iris.tests import _shared_utils, stock import iris.tests.stock import iris.util -class TestAnalysisCubeCoordComparison(tests.IrisTest): - def assertComparisonDict(self, comparison_dict, reference_filename): +class TestAnalysisCubeCoordComparison: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + + def assert_comparison_dict(self, comparison_dict, reference_filename): string = "" for key in sorted(comparison_dict): coord_groups = comparison_dict[key] @@ -36,7 +37,7 @@ def assertComparisonDict(self, comparison_dict, reference_filename): ] string += str(sorted(names)) string += "\n" - self.assertString(string, reference_filename) + _shared_utils.assert_string(self.request, string, reference_filename) def test_coord_comparison(self): cube1 = iris.cube.Cube(np.zeros((41, 41))) @@ -109,67 +110,71 @@ def test_coord_comparison(self): coord_comparison = iris.analysis._dimensional_metadata_comparison - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube1), ("analysis", "coord_comparison", "cube1_cube1.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube2), ("analysis", "coord_comparison", "cube1_cube2.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube3), ("analysis", "coord_comparison", "cube1_cube3.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube4), ("analysis", "coord_comparison", "cube1_cube4.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube5), ("analysis", "coord_comparison", "cube1_cube5.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube2, cube3), ("analysis", "coord_comparison", "cube2_cube3.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube2, cube4), ("analysis", "coord_comparison", "cube2_cube4.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube2, cube5), ("analysis", "coord_comparison", "cube2_cube5.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube3, cube4), ("analysis", "coord_comparison", "cube3_cube4.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube3, cube5), ("analysis", "coord_comparison", "cube3_cube5.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube4, cube5), ("analysis", "coord_comparison", "cube4_cube5.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube1, cube1), ("analysis", "coord_comparison", "cube1_cube1_cube1.txt"), ) - self.assertComparisonDict( + self.assert_comparison_dict( coord_comparison(cube1, cube2, cube1), ("analysis", "coord_comparison", "cube1_cube2_cube1.txt"), ) # get a coord comparison result and check that we are getting back what was expected coord_group = coord_comparison(cube1, cube2)["grouped_coords"][0] - self.assertIsInstance(coord_group, iris.analysis._CoordGroup) - self.assertIsInstance(list(coord_group)[0], iris.coords.Coord) + assert isinstance(coord_group, iris.analysis._CoordGroup) + assert isinstance(list(coord_group)[0], 
iris.coords.Coord) -class TestAnalysisWeights(tests.IrisTest): +class TestAnalysisWeights: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + def test_weighted_mean_little(self): data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32) weights = np.array([[9, 8, 7], [6, 5, 4], [3, 2, 1]], dtype=np.float32) @@ -196,7 +201,9 @@ def test_weighted_mean_little(self): ), 1, ) - self.assertCML(cube, ("analysis", "weighted_mean_source.cml")) + _shared_utils.assert_CML( + self.request, cube, ("analysis", "weighted_mean_source.cml") + ) a = cube.collapsed("lat", iris.analysis.MEAN, weights=weights) # np.ma.average doesn't apply type promotion rules in some versions, @@ -206,15 +213,19 @@ def test_weighted_mean_little(self): if a.dtype > np.float32: cast_data = a.data.astype(np.float32) a.data = cast_data - self.assertCMLApproxData(a, ("analysis", "weighted_mean_lat.cml")) + _shared_utils.assert_CML_approx_data( + self.request, a, ("analysis", "weighted_mean_lat.cml") + ) b = cube.collapsed(lon_coord, iris.analysis.MEAN, weights=weights) if b.dtype > np.float32: cast_data = b.data.astype(np.float32) b.data = cast_data b.data = np.asarray(b.data) - self.assertCMLApproxData(b, ("analysis", "weighted_mean_lon.cml")) - self.assertEqual(b.coord("dummy").shape, (1,)) + _shared_utils.assert_CML_approx_data( + self.request, b, ("analysis", "weighted_mean_lon.cml") + ) + assert b.coord("dummy").shape == (1,) # test collapsing multiple coordinates (and the fact that one of the coordinates isn't the same coordinate instance as on the cube) c = cube.collapsed( @@ -223,22 +234,26 @@ def test_weighted_mean_little(self): if c.dtype > np.float32: cast_data = c.data.astype(np.float32) c.data = cast_data - self.assertCMLApproxData(c, ("analysis", "weighted_mean_latlon.cml")) - self.assertEqual(c.coord("dummy").shape, (1,)) + _shared_utils.assert_CML_approx_data( + self.request, c, ("analysis", "weighted_mean_latlon.cml") + ) + assert c.coord("dummy").shape == (1,) # Check new coord bounds - made from points - self.assertArrayEqual(c.coord("lat").bounds, [[1, 3]]) + _shared_utils.assert_array_equal(c.coord("lat").bounds, [[1, 3]]) # Check new coord bounds - made from bounds cube.coord("lat").bounds = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]] c = cube.collapsed(["lat", "lon"], iris.analysis.MEAN, weights=weights) - self.assertArrayEqual(c.coord("lat").bounds, [[0.5, 3.5]]) + _shared_utils.assert_array_equal(c.coord("lat").bounds, [[0.5, 3.5]]) cube.coord("lat").bounds = None # Check there was no residual change - self.assertCML(cube, ("analysis", "weighted_mean_source.cml")) + _shared_utils.assert_CML( + self.request, cube, ("analysis", "weighted_mean_source.cml") + ) - @tests.skip_data + @_shared_utils.skip_data def test_weighted_mean(self): # compare with pp_area_avg - which collapses both lat and lon # @@ -247,8 +262,10 @@ def test_weighted_mean(self): # print, pp_area_avg(pp, /box) #287.927 # ;gives an answer of 287.927 # - e = iris.tests.stock.simple_pp() - self.assertCML(e, ("analysis", "weighted_mean_original.cml")) + e = stock.simple_pp() + _shared_utils.assert_CML( + self.request, e, ("analysis", "weighted_mean_original.cml") + ) e.coord("latitude").guess_bounds() e.coord("longitude").guess_bounds() area_weights = iris.analysis.cartography.area_weights(e) @@ -259,9 +276,9 @@ def test_weighted_mean(self): ) g = f.collapsed("longitude", iris.analysis.MEAN, weights=collapsed_area_weights) # check it's a 0d, scalar cube - self.assertEqual(g.shape, ()) + assert g.shape == 
() # check the value - pp_area_avg's result of 287.927 differs by factor of 1.00002959 - np.testing.assert_approx_equal(g.data, 287.935, significant=5) + _shared_utils.assert_array_almost_equal(g.data, 287.935, decimal=2) # check we get summed weights even if we don't give any h, summed_weights = e.collapsed("latitude", iris.analysis.MEAN, returned=True) @@ -270,10 +287,12 @@ def test_weighted_mean(self): # Check there was no residual change e.coord("latitude").bounds = None e.coord("longitude").bounds = None - self.assertCML(e, ("analysis", "weighted_mean_original.cml")) + _shared_utils.assert_CML( + self.request, e, ("analysis", "weighted_mean_original.cml") + ) # Test collapsing of missing coord - self.assertRaises( + pytest.raises( iris.exceptions.CoordinateNotFoundError, e.collapsed, "platitude", @@ -281,7 +300,7 @@ def test_weighted_mean(self): ) # Test collapsing of non data coord - self.assertRaises( + pytest.raises( iris.exceptions.CoordinateCollapseError, e.collapsed, "pressure", @@ -289,13 +308,15 @@ def test_weighted_mean(self): ) -@tests.skip_data -class TestAnalysisBasic(tests.IrisTest): - def setUp(self): - file = tests.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp")) +@_shared_utils.skip_data +class TestAnalysisBasic: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + file = _shared_utils.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp")) cubes = iris.load(file) self.cube = cubes[0] - self.assertCML(self.cube, ("analysis", "original.cml")) + _shared_utils.assert_CML(self.request, self.cube, ("analysis", "original.cml")) def _common( self, @@ -307,15 +328,16 @@ def _common( ): self.cube.data = self.cube.data.astype(np.float64) - self.assertCML(self.cube, ("analysis", original_name)) + _shared_utils.assert_CML(self.request, self.cube, ("analysis", original_name)) a = self.cube.collapsed("grid_latitude", aggregate) - self.assertCMLApproxData( - a, ("analysis", "%s_latitude.cml" % name), *args, **kwargs + _shared_utils.assert_CML_approx_data( + self.request, a, ("analysis", "%s_latitude.cml" % name), *args, **kwargs ) b = a.collapsed("grid_longitude", aggregate) - self.assertCMLApproxData( + _shared_utils.assert_CML_approx_data( + self.request, b, ("analysis", "%s_latitude_longitude.cml" % name), *args, @@ -323,7 +345,8 @@ def _common( ) c = self.cube.collapsed(["grid_latitude", "grid_longitude"], aggregate) - self.assertCMLApproxData( + _shared_utils.assert_CML_approx_data( + self.request, c, ("analysis", "%s_latitude_longitude_1call.cml" % name), *args, @@ -331,7 +354,7 @@ def _common( ) # Check there was no residual change - self.assertCML(self.cube, ("analysis", original_name)) + _shared_utils.assert_CML(self.request, self.cube, ("analysis", original_name)) def test_mean(self): self._common("mean", iris.analysis.MEAN, rtol=1e-05) @@ -369,12 +392,13 @@ def test_rms(self): self._common("rms", iris.analysis.RMS) def test_duplicate_coords(self): - self.assertRaises(ValueError, tests.stock.track_1d, duplicate_x=True) + pytest.raises(ValueError, stock.track_1d, duplicate_x=True) -class TestMissingData(tests.IrisTest): - def setUp(self): - self.cube_with_nan = tests.stock.simple_2d() +class TestMissingData: + @pytest.fixture(autouse=True) + def _setup(self): + self.cube_with_nan = stock.simple_2d() data = self.cube_with_nan.data.astype(np.float32) self.cube_with_nan.data = data.copy() @@ -382,36 +406,37 @@ def setUp(self): self.cube_with_nan.data[2, 2] = np.nan self.cube_with_nan.data[2, 3] = np.nan - self.cube_with_mask = 
tests.stock.simple_2d() + self.cube_with_mask = stock.simple_2d() self.cube_with_mask.data = ma.array( self.cube_with_nan.data, mask=np.isnan(self.cube_with_nan.data) ) def test_max(self): cube = self.cube_with_nan.collapsed("foo", iris.analysis.MAX) - np.testing.assert_array_equal(cube.data, np.array([3, np.nan, np.nan])) + _shared_utils.assert_array_equal(cube.data, np.array([3, np.nan, np.nan])) cube = self.cube_with_mask.collapsed("foo", iris.analysis.MAX) - np.testing.assert_array_equal(cube.data, np.array([3, 7, 9])) + _shared_utils.assert_array_equal(cube.data, np.array([3, 7, 9])) def test_min(self): cube = self.cube_with_nan.collapsed("foo", iris.analysis.MIN) - np.testing.assert_array_equal(cube.data, np.array([0, np.nan, np.nan])) + _shared_utils.assert_array_equal(cube.data, np.array([0, np.nan, np.nan])) cube = self.cube_with_mask.collapsed("foo", iris.analysis.MIN) - np.testing.assert_array_equal(cube.data, np.array([0, 5, 8])) + _shared_utils.assert_array_equal(cube.data, np.array([0, 5, 8])) def test_sum(self): cube = self.cube_with_nan.collapsed("foo", iris.analysis.SUM) - np.testing.assert_array_equal(cube.data, np.array([6, np.nan, np.nan])) + _shared_utils.assert_array_equal(cube.data, np.array([6, np.nan, np.nan])) cube = self.cube_with_mask.collapsed("foo", iris.analysis.SUM) - np.testing.assert_array_equal(cube.data, np.array([6, 18, 17])) + _shared_utils.assert_array_equal(cube.data, np.array([6, 18, 17])) -class TestAuxCoordCollapse(tests.IrisTest): - def setUp(self): - self.cube_with_aux_coord = tests.stock.simple_4d_with_hybrid_height() +class TestAuxCoordCollapse: + @pytest.fixture(autouse=True) + def _setup(self): + self.cube_with_aux_coord = stock.simple_4d_with_hybrid_height() # Guess bounds to get the weights self.cube_with_aux_coord.coord("grid_latitude").guess_bounds() @@ -419,12 +444,12 @@ def setUp(self): def test_max(self): cube = self.cube_with_aux_coord.collapsed("grid_latitude", iris.analysis.MAX) - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( cube.coord("surface_altitude").points, np.array([112, 113, 114, 115, 116, 117]), ) - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( cube.coord("surface_altitude").bounds, np.array( [ @@ -441,29 +466,30 @@ def test_max(self): # Check collapsing over the whole coord still works cube = self.cube_with_aux_coord.collapsed("altitude", iris.analysis.MAX) - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( cube.coord("surface_altitude").points, np.array([114]) ) - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( cube.coord("surface_altitude").bounds, np.array([[100, 129]]) ) cube = self.cube_with_aux_coord.collapsed("grid_longitude", iris.analysis.MAX) - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( cube.coord("surface_altitude").points, np.array([102, 108, 114, 120, 126]), ) - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( cube.coord("surface_altitude").bounds, np.array([[100, 105], [106, 111], [112, 117], [118, 123], [124, 129]]), ) -class TestAggregator_mdtol_keyword(tests.IrisTest): - def setUp(self): +class TestAggregator_mdtol_keyword: + @pytest.fixture(autouse=True) + def _setup(self): data = ma.array( [[1, 2], [4, 5]], dtype=np.float32, @@ -483,7 +509,7 @@ def setUp(self): def test_single_coord_no_mdtol(self): collapsed = self.cube.collapsed(self.cube.coord("lat"), iris.analysis.MEAN) t = ma.array([2.5, 5.0], mask=[False, True]) - self.assertMaskedArrayEqual(collapsed.data, t) + 
_shared_utils.assert_masked_array_almost_equal(collapsed.data, t) def test_single_coord_mdtol(self): self.cube.data.mask = np.array([[False, True], [False, False]]) @@ -491,7 +517,7 @@ def test_single_coord_mdtol(self): self.cube.coord("lat"), iris.analysis.MEAN, mdtol=0.5 ) t = ma.array([2.5, 5], mask=[False, False]) - self.assertMaskedArrayEqual(collapsed.data, t) + _shared_utils.assert_masked_array_almost_equal(collapsed.data, t) def test_single_coord_mdtol_alt(self): self.cube.data.mask = np.array([[False, True], [False, False]]) @@ -499,7 +525,7 @@ def test_single_coord_mdtol_alt(self): self.cube.coord("lat"), iris.analysis.MEAN, mdtol=0.4 ) t = ma.array([2.5, 5], mask=[False, True]) - self.assertMaskedArrayEqual(collapsed.data, t) + _shared_utils.assert_masked_array_almost_equal(collapsed.data, t) def test_multi_coord_no_mdtol(self): collapsed = self.cube.collapsed( @@ -507,7 +533,7 @@ def test_multi_coord_no_mdtol(self): iris.analysis.MEAN, ) t = np.array(2.5) - self.assertArrayEqual(collapsed.data, t) + _shared_utils.assert_array_equal(collapsed.data, t) def test_multi_coord_mdtol(self): collapsed = self.cube.collapsed( @@ -516,10 +542,14 @@ def test_multi_coord_mdtol(self): mdtol=0.4, ) t = ma.array(2.5, mask=True) - self.assertMaskedArrayEqual(collapsed.data, t) + _shared_utils.assert_masked_array_almost_equal(collapsed.data, t) -class TestAggregators(tests.IrisTest): +class TestAggregators: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + def _check_collapsed_percentile( self, cube, @@ -537,30 +567,32 @@ def _check_collapsed_percentile( percent=percents, **kwargs, ) - np.testing.assert_array_almost_equal(result.data, expected_result) - self.assertEqual(type(result.data), cube_data_type) + _shared_utils.assert_array_almost_equal(result.data, expected_result) + assert type(result.data) is cube_data_type if CML_filename is not None: - self.assertCML(result, ("analysis", CML_filename), checksum=False) + _shared_utils.assert_CML( + self.request, result, ("analysis", CML_filename), checksum=False + ) def _check_percentile(self, data, axis, percents, expected_result, **kwargs): result = iris.analysis._percentile(data, axis, percents, **kwargs) - np.testing.assert_array_almost_equal(result, expected_result) - self.assertEqual(type(result), type(expected_result)) + _shared_utils.assert_array_almost_equal(result, expected_result) + assert type(result) is type(expected_result) def test_percentile_1d_25_percent(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() self._check_collapsed_percentile( cube, 25, "foo", 2.5, CML_filename="first_quartile_foo_1d.cml" ) def test_percentile_1d_75_percent(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() self._check_collapsed_percentile( cube, 75, "foo", 7.5, CML_filename="third_quartile_foo_1d.cml" ) def test_fast_percentile_1d_25_percent(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() self._check_collapsed_percentile( cube, 25, @@ -571,7 +603,7 @@ def test_fast_percentile_1d_25_percent(self): ) def test_fast_percentile_1d_75_percent(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() self._check_collapsed_percentile( cube, 75, @@ -582,7 +614,7 @@ def test_fast_percentile_1d_75_percent(self): ) def test_fast_percentile_1d_75_percent_masked_type_no_mask(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() cube.data = ma.MaskedArray(cube.data) self._check_collapsed_percentile( cube, @@ -594,7 +626,7 @@ def 
test_fast_percentile_1d_75_percent_masked_type_no_mask(self): ) def test_percentile_2d_single_coord(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() self._check_collapsed_percentile( cube, 25, @@ -604,7 +636,7 @@ def test_percentile_2d_single_coord(self): ) def test_percentile_2d_two_coords(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() self._check_collapsed_percentile( cube, 25, @@ -614,7 +646,7 @@ def test_percentile_2d_two_coords(self): ) def test_fast_percentile_2d_single_coord(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() self._check_collapsed_percentile( cube, 25, @@ -625,7 +657,7 @@ def test_fast_percentile_2d_single_coord(self): ) def test_fast_percentile_2d_two_coords(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() self._check_collapsed_percentile( cube, 25, @@ -636,7 +668,7 @@ def test_fast_percentile_2d_two_coords(self): ) def test_fast_percentile_2d_single_coord_masked_type_no_mask(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() cube.data = ma.MaskedArray(cube.data) self._check_collapsed_percentile( cube, @@ -648,7 +680,7 @@ def test_fast_percentile_2d_single_coord_masked_type_no_mask(self): ) def test_fast_percentile_2d_two_coords_masked_type_no_mask(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() cube.data = ma.MaskedArray(cube.data) self._check_collapsed_percentile( cube, @@ -744,7 +776,7 @@ def test_fast_percentile_3d_axis_two_masked_type_no_mask(self): ) def test_percentile_3d_masked(self): - cube = tests.stock.simple_3d_mask() + cube = stock.simple_3d_mask() expected_result = [ [12.0, 13.0, 14.0, 15.0], [16.0, 17.0, 18.0, 19.0], @@ -760,10 +792,10 @@ def test_percentile_3d_masked(self): ) def test_fast_percentile_3d_masked_type_masked(self): - cube = tests.stock.simple_3d_mask() + cube = stock.simple_3d_mask() msg = "Cannot use fast np.percentile method with masked array." 
- with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): cube.collapsed( "wibble", iris.analysis.PERCENTILE, @@ -772,7 +804,7 @@ def test_fast_percentile_3d_masked_type_masked(self): ) def test_percentile_3d_notmasked(self): - cube = tests.stock.simple_3d() + cube = stock.simple_3d() expected_result = [ [9.0, 10.0, 11.0, 12.0], [13.0, 14.0, 15.0, 16.0], @@ -788,7 +820,7 @@ def test_percentile_3d_notmasked(self): ) def test_fast_percentile_3d_notmasked(self): - cube = tests.stock.simple_3d() + cube = stock.simple_3d() expected_result = [ [9.0, 10.0, 11.0, 12.0], [13.0, 14.0, 15.0, 16.0], @@ -805,42 +837,50 @@ def test_fast_percentile_3d_notmasked(self): ) def test_proportion(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() assert np.any(cube.data >= 5) gt5 = cube.collapsed( "foo", iris.analysis.PROPORTION, function=lambda val: val >= 5 ) - np.testing.assert_array_almost_equal(gt5.data, np.array([6 / 11.0])) - self.assertCML(gt5, ("analysis", "proportion_foo_1d.cml"), checksum=False) + _shared_utils.assert_array_almost_equal(gt5.data, np.array([6 / 11.0])) + _shared_utils.assert_CML( + self.request, gt5, ("analysis", "proportion_foo_1d.cml"), checksum=False + ) def test_proportion_2d(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() gt6 = cube.collapsed( "foo", iris.analysis.PROPORTION, function=lambda val: val >= 6 ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( gt6.data, np.array([0, 0.5, 1], dtype=np.float32) ) - self.assertCML(gt6, ("analysis", "proportion_foo_2d.cml"), checksum=False) + _shared_utils.assert_CML( + self.request, gt6, ("analysis", "proportion_foo_2d.cml"), checksum=False + ) gt6 = cube.collapsed( "bar", iris.analysis.PROPORTION, function=lambda val: val >= 6 ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( gt6.data, np.array([1 / 3, 1 / 3, 2 / 3, 2 / 3], dtype=np.float32) ) - self.assertCML(gt6, ("analysis", "proportion_bar_2d.cml"), checksum=False) + _shared_utils.assert_CML( + self.request, gt6, ("analysis", "proportion_bar_2d.cml"), checksum=False + ) gt6 = cube.collapsed( ("foo", "bar"), iris.analysis.PROPORTION, function=lambda val: val >= 6, ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( gt6.data, np.array([0.5], dtype=np.float32) ) - self.assertCML(gt6, ("analysis", "proportion_foo_bar_2d.cml"), checksum=False) + _shared_utils.assert_CML( + self.request, gt6, ("analysis", "proportion_foo_bar_2d.cml"), checksum=False + ) # mask the data cube.data = ma.array(cube.data, mask=cube.data % 2) @@ -848,7 +888,7 @@ def test_proportion_2d(self): gt6_masked = cube.collapsed( "bar", iris.analysis.PROPORTION, function=lambda val: val >= 6 ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( gt6_masked.data, ma.array( [1 / 3, None, 1 / 2, None], @@ -856,58 +896,71 @@ def test_proportion_2d(self): dtype=np.float32, ), ) - self.assertCML( + _shared_utils.assert_CML( + self.request, gt6_masked, ("analysis", "proportion_foo_2d_masked.cml"), checksum=False, ) def test_count(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() gt5 = cube.collapsed("foo", iris.analysis.COUNT, function=lambda val: val >= 5) - np.testing.assert_array_almost_equal(gt5.data, np.array([6])) + _shared_utils.assert_array_almost_equal(gt5.data, np.array([6])) gt5.data = gt5.data.astype("i8") - self.assertCML(gt5, ("analysis", "count_foo_1d.cml"), checksum=False) + 
_shared_utils.assert_CML( + self.request, gt5, ("analysis", "count_foo_1d.cml"), checksum=False + ) def test_count_2d(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() gt6 = cube.collapsed("foo", iris.analysis.COUNT, function=lambda val: val >= 6) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( gt6.data, np.array([0, 2, 4], dtype=np.float32) ) gt6.data = gt6.data.astype("i8") - self.assertCML(gt6, ("analysis", "count_foo_2d.cml"), checksum=False) + _shared_utils.assert_CML( + self.request, gt6, ("analysis", "count_foo_2d.cml"), checksum=False + ) gt6 = cube.collapsed("bar", iris.analysis.COUNT, function=lambda val: val >= 6) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( gt6.data, np.array([1, 1, 2, 2], dtype=np.float32) ) gt6.data = gt6.data.astype("i8") - self.assertCML(gt6, ("analysis", "count_bar_2d.cml"), checksum=False) + _shared_utils.assert_CML( + self.request, gt6, ("analysis", "count_bar_2d.cml"), checksum=False + ) gt6 = cube.collapsed( ("foo", "bar"), iris.analysis.COUNT, function=lambda val: val >= 6 ) - np.testing.assert_array_almost_equal(gt6.data, np.array([6], dtype=np.float32)) + _shared_utils.assert_array_almost_equal( + gt6.data, np.array([6], dtype=np.float32) + ) gt6.data = gt6.data.astype("i8") - self.assertCML(gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False) + _shared_utils.assert_CML( + self.request, gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False + ) def test_max_run_1d(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() # [ 0 1 2 3 4 5 6 7 8 9 10] result = cube.collapsed( "foo", iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 8, 9]), ) - self.assertArrayEqual(result.data, np.array(3)) - self.assertEqual(result.units, 1) - self.assertTupleEqual(result.cell_methods, ()) - self.assertCML(result, ("analysis", "max_run_foo_1d.cml"), checksum=False) + _shared_utils.assert_array_equal(result.data, np.array(3)) + assert result.units == 1 + assert result.cell_methods == () + _shared_utils.assert_CML( + self.request, result, ("analysis", "max_run_foo_1d.cml"), checksum=False + ) def test_max_run_lazy(self): - cube = tests.stock.simple_1d() + cube = stock.simple_1d() # [ 0 1 2 3 4 5 6 7 8 9 10] # Make data lazy cube.data = da.from_array(cube.data) @@ -916,16 +969,18 @@ def test_max_run_lazy(self): iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 8, 9]), ) - self.assertTrue(result.has_lazy_data()) + assert result.has_lazy_data() # Realise data _ = result.data - self.assertArrayEqual(result.data, np.array(3)) - self.assertEqual(result.units, 1) - self.assertTupleEqual(result.cell_methods, ()) - self.assertCML(result, ("analysis", "max_run_foo_1d.cml"), checksum=False) + _shared_utils.assert_array_equal(result.data, np.array(3)) + assert result.units == 1 + assert result.cell_methods == () + _shared_utils.assert_CML( + self.request, result, ("analysis", "max_run_foo_1d.cml"), checksum=False + ) def test_max_run_2d(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() # [[ 0 1 2 3] # [ 4 5 6 7] # [ 8 9 10 11]] @@ -934,22 +989,30 @@ def test_max_run_2d(self): iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), ) - self.assertArrayEqual(foo_result.data, np.array([1, 2, 1], dtype=np.float32)) - self.assertEqual(foo_result.units, 1) - self.assertTupleEqual(foo_result.cell_methods, ()) - self.assertCML(foo_result, ("analysis", "max_run_foo_2d.cml"), checksum=False) + 
_shared_utils.assert_array_equal( + foo_result.data, np.array([1, 2, 1], dtype=np.float32) + ) + assert foo_result.units == 1 + assert foo_result.cell_methods == () + _shared_utils.assert_CML( + self.request, foo_result, ("analysis", "max_run_foo_2d.cml"), checksum=False + ) bar_result = cube.collapsed( "bar", iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), ) - self.assertArrayEqual(bar_result.data, np.array([2, 2, 0, 3], dtype=np.float32)) - self.assertEqual(bar_result.units, 1) - self.assertTupleEqual(bar_result.cell_methods, ()) - self.assertCML(bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False) + _shared_utils.assert_array_equal( + bar_result.data, np.array([2, 2, 0, 3], dtype=np.float32) + ) + assert bar_result.units == 1 + assert bar_result.cell_methods == () + _shared_utils.assert_CML( + self.request, bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False + ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _ = cube.collapsed( ("foo", "bar"), iris.analysis.MAX_RUN, @@ -957,7 +1020,7 @@ def test_max_run_2d(self): ) def test_max_run_masked(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() # [[ 0 1 2 3] # [ 4 5 6 7] # [ 8 9 10 11]] @@ -972,40 +1035,51 @@ def test_max_run_masked(self): iris.analysis.MAX_RUN, function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 9, 10, 11]), ) - self.assertArrayEqual(result.data, np.array([1, 1, 2, 0], dtype=np.float32)) - self.assertEqual(result.units, 1) - self.assertTupleEqual(result.cell_methods, ()) - self.assertCML( - result, ("analysis", "max_run_bar_2d_masked.cml"), checksum=False + _shared_utils.assert_array_equal( + result.data, np.array([1, 1, 2, 0], dtype=np.float32) + ) + assert result.units == 1 + assert result.cell_methods == () + _shared_utils.assert_CML( + self.request, + result, + ("analysis", "max_run_bar_2d_masked.cml"), + checksum=False, ) def test_weighted_sum_consistency(self): # weighted sum with unit weights should be the same as a sum - cube = tests.stock.simple_1d() + cube = stock.simple_1d() normal_sum = cube.collapsed("foo", iris.analysis.SUM) weights = np.ones_like(cube.data) weighted_sum = cube.collapsed("foo", iris.analysis.SUM, weights=weights) - self.assertArrayAlmostEqual(normal_sum.data, weighted_sum.data) + _shared_utils.assert_array_almost_equal(normal_sum.data, weighted_sum.data) def test_weighted_sum_1d(self): # verify 1d weighted sum is correct - cube = tests.stock.simple_1d() + cube = stock.simple_1d() weights = np.array([0.05, 0.05, 0.1, 0.1, 0.2, 0.3, 0.2, 0.1, 0.1, 0.05, 0.05]) result = cube.collapsed("foo", iris.analysis.SUM, weights=weights) - self.assertAlmostEqual(result.data, 6.5) - self.assertCML(result, ("analysis", "sum_weighted_1d.cml"), checksum=False) + assert result.data == pytest.approx(6.5) + _shared_utils.assert_CML( + self.request, result, ("analysis", "sum_weighted_1d.cml"), checksum=False + ) def test_weighted_sum_2d(self): # verify 2d weighted sum is correct - cube = tests.stock.simple_2d() + cube = stock.simple_2d() weights = np.array([0.3, 0.4, 0.3]) weights = iris.util.broadcast_to_shape(weights, cube.shape, [0]) result = cube.collapsed("bar", iris.analysis.SUM, weights=weights) - self.assertArrayAlmostEqual(result.data, np.array([4.0, 5.0, 6.0, 7.0])) - self.assertCML(result, ("analysis", "sum_weighted_2d.cml"), checksum=False) + _shared_utils.assert_array_almost_equal( + result.data, np.array([4.0, 5.0, 6.0, 7.0]) + ) + _shared_utils.assert_CML( + self.request, result, ("analysis", 
"sum_weighted_2d.cml"), checksum=False + ) def test_weighted_rms(self): - cube = tests.stock.simple_2d() + cube = stock.simple_2d() # modify cube data so that the results are nice numbers cube.data = np.array( [[4, 7, 10, 8], [21, 30, 12, 24], [14, 16, 20, 8]], @@ -1017,12 +1091,14 @@ def test_weighted_rms(self): ) expected_result = np.array([8.0, 24.0, 16.0]) result = cube.collapsed("foo", iris.analysis.RMS, weights=weights) - self.assertArrayAlmostEqual(result.data, expected_result) - self.assertCML(result, ("analysis", "rms_weighted_2d.cml"), checksum=False) + _shared_utils.assert_array_almost_equal(result.data, expected_result) + _shared_utils.assert_CML( + self.request, result, ("analysis", "rms_weighted_2d.cml"), checksum=False + ) -@tests.skip_data -class TestRotatedPole(tests.IrisTest): +@_shared_utils.skip_data +class TestRotatedPole: def _check_both_conversions(self, cube, index): rlons, rlats = iris.analysis.cartography.get_xy_grids(cube) rcs = cube.coord_system("RotatedGeogCS") @@ -1032,21 +1108,21 @@ def _check_both_conversions(self, cube, index): rcs.grid_north_pole_longitude, rcs.grid_north_pole_latitude, ) - self.assertDataAlmostEqual( + _shared_utils.assert_data_almost_equal( x, ("analysis", "rotated_pole.{}.x.json".format(index)) ) - self.assertDataAlmostEqual( + _shared_utils.assert_data_almost_equal( y, ("analysis", "rotated_pole.{}.y.json".format(index)) ) - self.assertDataAlmostEqual( + _shared_utils.assert_data_almost_equal( rlons, ("analysis", "rotated_pole.{}.rlon.json".format(index)) ) - self.assertDataAlmostEqual( + _shared_utils.assert_data_almost_equal( rlats, ("analysis", "rotated_pole.{}.rlat.json".format(index)) ) def test_all(self): - path = tests.get_data_path(("PP", "ukVorog", "ukv_orog_refonly.pp")) + path = _shared_utils.get_data_path(("PP", "ukVorog", "ukv_orog_refonly.pp")) master_cube = iris.load_cube(path) # Check overall behaviour. 
@@ -1067,8 +1143,8 @@ def test_unrotate_nd(self): solx = np.array([[-16.42176094, -14.85892262], [-16.71055023, -14.58434624]]) soly = np.array([[46.00724251, 51.29188893], [46.98728486, 50.30706042]]) - self.assertArrayAlmostEqual(resx, solx) - self.assertArrayAlmostEqual(resy, soly) + _shared_utils.assert_array_almost_equal(resx, solx) + _shared_utils.assert_array_almost_equal(resy, soly) def test_unrotate_1d(self): rlons = np.array([350.0, 352.0, 354.0, 356.0]) @@ -1082,8 +1158,8 @@ def test_unrotate_1d(self): solx = np.array([-16.42176094, -14.85892262, -12.88946157, -10.35078336]) soly = np.array([46.00724251, 51.29188893, 56.55031485, 61.77015703]) - self.assertArrayAlmostEqual(resx, solx) - self.assertArrayAlmostEqual(resy, soly) + _shared_utils.assert_array_almost_equal(resx, solx) + _shared_utils.assert_array_almost_equal(resy, soly) def test_rotate_nd(self): rlons = np.array([[350.0, 351.0], [352.0, 353.0]]) @@ -1095,8 +1171,8 @@ def test_rotate_nd(self): solx = np.array([[148.69672569, 149.24727087], [149.79067025, 150.31754368]]) soly = np.array([[18.60905789, 23.67749384], [28.74419024, 33.8087963]]) - self.assertArrayAlmostEqual(resx, solx) - self.assertArrayAlmostEqual(resy, soly) + _shared_utils.assert_array_almost_equal(resx, solx) + _shared_utils.assert_array_almost_equal(resy, soly) def test_rotate_1d(self): rlons = np.array([350.0, 351.0, 352.0, 353.0]) @@ -1110,22 +1186,27 @@ def test_rotate_1d(self): solx = np.array([148.69672569, 149.24727087, 149.79067025, 150.31754368]) soly = np.array([18.60905789, 23.67749384, 28.74419024, 33.8087963]) - self.assertArrayAlmostEqual(resx, solx) - self.assertArrayAlmostEqual(resy, soly) + _shared_utils.assert_array_almost_equal(resx, solx) + _shared_utils.assert_array_almost_equal(resy, soly) -@tests.skip_data -class TestAreaWeights(tests.IrisTest): +@_shared_utils.skip_data +class TestAreaWeights: # Note: chunks is simply ignored for non-lazy data @pytest.mark.parametrize("chunks", [None, (2, 3)]) + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + def test_area_weights(self): - small_cube = iris.tests.stock.simple_pp() + small_cube = stock.simple_pp() # Get offset, subsampled region: small enough to test against literals small_cube = small_cube[10:, 35:] small_cube = small_cube[::8, ::8] small_cube = small_cube[:5, :4] # pre-check non-data properties - self.assertCML( + _shared_utils.assert_CML( + self.request, small_cube, ("analysis", "areaweights_original.cml"), checksum=False, @@ -1145,19 +1226,20 @@ def test_area_weights(self): ], dtype=np.float64, ) - self.assertArrayAllClose(area_weights, expected_results, rtol=1e-8) + _shared_utils.assert_array_all_close(area_weights, expected_results, rtol=1e-8) # Check there was no residual change small_cube.coord("latitude").bounds = None small_cube.coord("longitude").bounds = None - self.assertCML( + _shared_utils.assert_CML( + self.request, small_cube, ("analysis", "areaweights_original.cml"), checksum=False, ) -@tests.skip_data +@_shared_utils.skip_data class TestLazyAreaWeights: @pytest.mark.parametrize("normalize", [True, False]) @pytest.mark.parametrize("chunks", [None, (2, 3, 4), (2, 2, 2)]) @@ -1198,71 +1280,72 @@ def test_lazy_area_weights(self, chunks, normalize): np.testing.assert_allclose(area_weights.compute(), expected) -@tests.skip_data -class TestAreaWeightGeneration(tests.IrisTest): - def setUp(self): - self.cube = iris.tests.stock.realistic_4d() +@_shared_utils.skip_data +class TestAreaWeightGeneration: + 
@pytest.fixture(autouse=True) + def _setup(self): + self.cube = stock.realistic_4d() def test_area_weights_std(self): # weights for stock 4d data weights = iris.analysis.cartography.area_weights(self.cube) - self.assertEqual(weights.shape, self.cube.shape) + assert weights.shape == self.cube.shape def test_area_weights_order(self): # weights for data with dimensions in a different order order = [3, 2, 1, 0] # (lon, lat, level, time) self.cube.transpose(order) weights = iris.analysis.cartography.area_weights(self.cube) - self.assertEqual(weights.shape, self.cube.shape) + assert weights.shape == self.cube.shape def test_area_weights_non_adjacent(self): # weights for cube with non-adjacent latitude/longitude dimensions order = [0, 3, 1, 2] # (time, lon, level, lat) self.cube.transpose(order) weights = iris.analysis.cartography.area_weights(self.cube) - self.assertEqual(weights.shape, self.cube.shape) + assert weights.shape == self.cube.shape def test_area_weights_scalar_latitude(self): # weights for cube with a scalar latitude dimension cube = self.cube[:, :, 0, :] weights = iris.analysis.cartography.area_weights(cube) - self.assertEqual(weights.shape, cube.shape) + assert weights.shape == cube.shape def test_area_weights_scalar_longitude(self): # weights for cube with a scalar longitude dimension cube = self.cube[:, :, :, 0] weights = iris.analysis.cartography.area_weights(cube) - self.assertEqual(weights.shape, cube.shape) + assert weights.shape == cube.shape def test_area_weights_scalar(self): # weights for cube with scalar latitude and longitude dimensions cube = self.cube[:, :, 0, 0] weights = iris.analysis.cartography.area_weights(cube) - self.assertEqual(weights.shape, cube.shape) + assert weights.shape == cube.shape def test_area_weights_singleton_latitude(self): # singleton (1-point) latitude dimension cube = self.cube[:, :, 0:1, :] weights = iris.analysis.cartography.area_weights(cube) - self.assertEqual(weights.shape, cube.shape) + assert weights.shape == cube.shape def test_area_weights_singleton_longitude(self): # singleton (1-point) longitude dimension cube = self.cube[:, :, :, 0:1] weights = iris.analysis.cartography.area_weights(cube) - self.assertEqual(weights.shape, cube.shape) + assert weights.shape == cube.shape def test_area_weights_singletons(self): # singleton (1-point) latitude and longitude dimensions cube = self.cube[:, :, 0:1, 0:1] weights = iris.analysis.cartography.area_weights(cube) - self.assertEqual(weights.shape, cube.shape) + assert weights.shape == cube.shape def test_area_weights_normalized(self): # normalized area weights must sum to one over lat/lon dimensions. 
weights = iris.analysis.cartography.area_weights(self.cube, normalize=True) sumweights = weights.sum(axis=3).sum(axis=2) # sum over lon and lat - self.assertArrayAlmostEqual(sumweights, 1) + _shared_utils.assert_array_almost_equal(sumweights, 1) def test_area_weights_non_contiguous(self): # Slice the cube so that we have non-contiguous longitude @@ -1271,23 +1354,24 @@ def test_area_weights_non_contiguous(self): cube = self.cube[..., ind] weights = iris.analysis.cartography.area_weights(cube) expected = iris.analysis.cartography.area_weights(self.cube)[..., ind] - self.assertArrayEqual(weights, expected) + _shared_utils.assert_array_equal(weights, expected) def test_area_weights_no_lon_bounds(self): self.cube.coord("grid_longitude").bounds = None - with self.assertRaises(ValueError): + with pytest.raises(ValueError): iris.analysis.cartography.area_weights(self.cube) def test_area_weights_no_lat_bounds(self): self.cube.coord("grid_latitude").bounds = None - with self.assertRaises(ValueError): + with pytest.raises(ValueError): iris.analysis.cartography.area_weights(self.cube) -@tests.skip_data -class TestLatitudeWeightGeneration(tests.IrisTest): - def setUp(self): - path = iris.tests.get_data_path( +@_shared_utils.skip_data +class TestLatitudeWeightGeneration: + @pytest.fixture(autouse=True) + def _setup(self): + path = _shared_utils.get_data_path( ["NetCDF", "rotated", "xyt", "small_rotPole_precipitation.nc"] ) self.cube = iris.load_cube(path) @@ -1317,52 +1401,58 @@ def test_cosine_latitude_weights_range(self): ) cube.add_dim_coord(lat_coord, 0) weights = iris.analysis.cartography.cosine_latitude_weights(cube) - self.assertTrue(weights.max() <= 1) - self.assertTrue(weights.min() >= 0) + assert weights.max() <= 1 + assert weights.min() >= 0 def test_cosine_latitude_weights_0d(self): # 0d latitude dimension (scalar coordinate) weights = iris.analysis.cartography.cosine_latitude_weights( self.cube_dim_lat[:, 0, :] ) - self.assertEqual(weights.shape, self.cube_dim_lat[:, 0, :].shape) - self.assertAlmostEqual(weights[0, 0], np.cos(np.deg2rad(self.lat1d[0]))) + assert weights.shape == self.cube_dim_lat[:, 0, :].shape + assert weights[0, 0] == pytest.approx(np.cos(np.deg2rad(self.lat1d[0]))) def test_cosine_latitude_weights_1d_singleton(self): # singleton (1-point) 1d latitude coordinate (time, lat, lon) cube = self.cube_dim_lat[:, 0:1, :] weights = iris.analysis.cartography.cosine_latitude_weights(cube) - self.assertEqual(weights.shape, cube.shape) - self.assertAlmostEqual(weights[0, 0, 0], np.cos(np.deg2rad(self.lat1d[0]))) + assert weights.shape == cube.shape + assert weights[0, 0, 0] == pytest.approx(np.cos(np.deg2rad(self.lat1d[0]))) def test_cosine_latitude_weights_1d(self): # 1d latitude coordinate (time, lat, lon) weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) - self.assertEqual(weights.shape, self.cube.shape) - self.assertArrayAlmostEqual(weights[0, :, 0], np.cos(np.deg2rad(self.lat1d))) + assert weights.shape == self.cube.shape + _shared_utils.assert_array_almost_equal( + weights[0, :, 0], np.cos(np.deg2rad(self.lat1d)) + ) def test_cosine_latitude_weights_1d_latitude_first(self): # 1d latitude coordinate with latitude first (lat, time, lon) order = [1, 0, 2] # (lat, time, lon) self.cube_dim_lat.transpose(order) weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) - self.assertEqual(weights.shape, self.cube_dim_lat.shape) - self.assertArrayAlmostEqual(weights[:, 0, 0], np.cos(np.deg2rad(self.lat1d))) + assert weights.shape == 
self.cube_dim_lat.shape + _shared_utils.assert_array_almost_equal( + weights[:, 0, 0], np.cos(np.deg2rad(self.lat1d)) + ) def test_cosine_latitude_weights_1d_latitude_last(self): # 1d latitude coordinate with latitude last (time, lon, lat) order = [0, 2, 1] # (time, lon, lat) self.cube_dim_lat.transpose(order) weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) - self.assertEqual(weights.shape, self.cube_dim_lat.shape) - self.assertArrayAlmostEqual(weights[0, 0, :], np.cos(np.deg2rad(self.lat1d))) + assert weights.shape == self.cube_dim_lat.shape + _shared_utils.assert_array_almost_equal( + weights[0, 0, :], np.cos(np.deg2rad(self.lat1d)) + ) def test_cosine_latitude_weights_2d_singleton1(self): # 2d latitude coordinate with first dimension singleton cube = self.cube_aux_lat[:, 0:1, :] weights = iris.analysis.cartography.cosine_latitude_weights(cube) - self.assertEqual(weights.shape, cube.shape) - self.assertArrayAlmostEqual( + assert weights.shape == cube.shape + _shared_utils.assert_array_almost_equal( weights[0, :, :], np.cos(np.deg2rad(self.lat2d[0:1, :])) ) @@ -1370,8 +1460,8 @@ def test_cosine_latitude_weights_2d_singleton2(self): # 2d latitude coordinate with second dimension singleton cube = self.cube_aux_lat[:, :, 0:1] weights = iris.analysis.cartography.cosine_latitude_weights(cube) - self.assertEqual(weights.shape, cube.shape) - self.assertArrayAlmostEqual( + assert weights.shape == cube.shape + _shared_utils.assert_array_almost_equal( weights[0, :, :], np.cos(np.deg2rad(self.lat2d[:, 0:1])) ) @@ -1379,47 +1469,56 @@ def test_cosine_latitude_weights_2d_singleton3(self): # 2d latitude coordinate with both dimensions singleton cube = self.cube_aux_lat[:, 0:1, 0:1] weights = iris.analysis.cartography.cosine_latitude_weights(cube) - self.assertEqual(weights.shape, cube.shape) - self.assertArrayAlmostEqual( + assert weights.shape == cube.shape + _shared_utils.assert_array_almost_equal( weights[0, :, :], np.cos(np.deg2rad(self.lat2d[0:1, 0:1])) ) def test_cosine_latitude_weights_2d(self): # 2d latitude coordinate (time, lat, lon) weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_aux_lat) - self.assertEqual(weights.shape, self.cube_aux_lat.shape) - self.assertArrayAlmostEqual(weights[0, :, :], np.cos(np.deg2rad(self.lat2d))) + assert weights.shape == self.cube_aux_lat.shape + _shared_utils.assert_array_almost_equal( + weights[0, :, :], np.cos(np.deg2rad(self.lat2d)) + ) def test_cosine_latitude_weights_2d_latitude_first(self): # 2d latitude coordinate with latitude first (lat, time, lon) order = [1, 0, 2] # (lat, time, lon) self.cube_aux_lat.transpose(order) weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_aux_lat) - self.assertEqual(weights.shape, self.cube_aux_lat.shape) - self.assertArrayAlmostEqual(weights[:, 0, :], np.cos(np.deg2rad(self.lat2d))) + assert weights.shape == self.cube_aux_lat.shape + _shared_utils.assert_array_almost_equal( + weights[:, 0, :], np.cos(np.deg2rad(self.lat2d)) + ) def test_cosine_latitude_weights_2d_latitude_last(self): # 2d latitude coordinate with latitude last (time, lon, lat) order = [0, 2, 1] # (time, lon, lat) self.cube_aux_lat.transpose(order) weights = iris.analysis.cartography.cosine_latitude_weights(self.cube_aux_lat) - self.assertEqual(weights.shape, self.cube_aux_lat.shape) - self.assertArrayAlmostEqual(weights[0, :, :], np.cos(np.deg2rad(self.lat2d.T))) + assert weights.shape == self.cube_aux_lat.shape + _shared_utils.assert_array_almost_equal( + weights[0, :, :], 
np.cos(np.deg2rad(self.lat2d.T)) + ) def test_cosine_latitude_weights_no_latitude(self): # no coordinate identified as latitude self.cube_dim_lat.remove_coord("grid_latitude") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _ = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) def test_cosine_latitude_weights_multiple_latitude(self): # two coordinates identified as latitude - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _ = iris.analysis.cartography.cosine_latitude_weights(self.cube) -class TestRollingWindow(tests.IrisTest): - def setUp(self): +class TestRollingWindow: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + # XXX Comes from test_aggregated_by cube = iris.cube.Cube( np.array([[6, 10, 12, 18], [8, 12, 14, 20], [18, 12, 10, 6]]), @@ -1450,7 +1549,7 @@ def test_non_mean_operator(self): expected_result = np.array( [[10, 12, 18], [12, 14, 20], [18, 12, 10]], dtype=np.float64 ) - self.assertArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_array_equal(expected_result, res_cube.data) def test_longitude_simple(self): res_cube = self.cube.rolling_window("longitude", iris.analysis.MEAN, window=2) @@ -1460,11 +1559,15 @@ def test_longitude_simple(self): dtype=np.float64, ) - self.assertArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_array_equal(expected_result, res_cube.data) - self.assertCML(res_cube, ("analysis", "rolling_window", "simple_longitude.cml")) + _shared_utils.assert_CML( + self.request, + res_cube, + ("analysis", "rolling_window", "simple_longitude.cml"), + ) - self.assertRaises( + pytest.raises( ValueError, self.cube.rolling_window, "longitude", @@ -1493,12 +1596,12 @@ def test_longitude_masked(self): dtype=np.float64, ) - self.assertMaskedArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_masked_array_almost_equal(expected_result, res_cube.data) def test_longitude_circular(self): cube = self.cube cube.coord("longitude").circular = True - self.assertRaises( + pytest.raises( iris.exceptions.NotYetImplementedError, self.cube.rolling_window, "longitude", @@ -1511,12 +1614,16 @@ def test_different_length_windows(self): expected_result = np.array([[11.5], [13.5], [11.5]], dtype=np.float64) - self.assertArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_array_equal(expected_result, res_cube.data) - self.assertCML(res_cube, ("analysis", "rolling_window", "size_4_longitude.cml")) + _shared_utils.assert_CML( + self.request, + res_cube, + ("analysis", "rolling_window", "size_4_longitude.cml"), + ) # Window too long: - self.assertRaises( + pytest.raises( ValueError, self.cube.rolling_window, "longitude", @@ -1524,7 +1631,7 @@ def test_different_length_windows(self): window=6, ) # Window too small: - self.assertRaises( + pytest.raises( ValueError, self.cube.rolling_window, "longitude", @@ -1533,7 +1640,7 @@ def test_different_length_windows(self): ) def test_bad_coordinate(self): - self.assertRaises( + pytest.raises( KeyError, self.cube.rolling_window, "wibble", @@ -1549,9 +1656,13 @@ def test_latitude_simple(self): dtype=np.float64, ) - self.assertArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_array_equal(expected_result, res_cube.data) - self.assertCML(res_cube, ("analysis", "rolling_window", "simple_latitude.cml")) + _shared_utils.assert_CML( + self.request, + res_cube, + ("analysis", "rolling_window", "simple_latitude.cml"), + ) def test_mean_with_weights_consistency(self): # equal weights should 
be the same as the mean with no weights @@ -1562,7 +1673,7 @@ def test_mean_with_weights_consistency(self): expected_result = self.cube.rolling_window( "longitude", iris.analysis.MEAN, window=2 ) - self.assertArrayEqual(expected_result.data, res_cube.data) + _shared_utils.assert_array_equal(expected_result.data, res_cube.data) def test_mean_with_weights(self): # rolling window mean with weights @@ -1574,10 +1685,10 @@ def test_mean_with_weights(self): [[10.2, 13.6], [12.2, 15.6], [12.0, 9.0]], dtype=np.float64 ) # use almost equal to compare floats - self.assertArrayAlmostEqual(expected_result, res_cube.data) + _shared_utils.assert_array_almost_equal(expected_result, res_cube.data) -class TestCreateWeightedAggregatorFn(tests.IrisTest): +class TestCreateWeightedAggregatorFn: @staticmethod def aggregator_fn(data, axis, **kwargs): return (data, axis, kwargs) @@ -1587,20 +1698,20 @@ def test_no_weights_supplied(self): self.aggregator_fn, 42, test_kwarg="test" ) output = aggregator_fn("dummy_array", None) - self.assertEqual(len(output), 3) - self.assertEqual(output[0], "dummy_array") - self.assertEqual(output[1], 42) - self.assertEqual(output[2], {"test_kwarg": "test"}) + assert len(output) == 3 + assert output[0] == "dummy_array" + assert output[1] == 42 + assert output[2] == {"test_kwarg": "test"} def test_weights_supplied(self): aggregator_fn = iris.analysis.create_weighted_aggregator_fn( self.aggregator_fn, 42, test_kwarg="test" ) output = aggregator_fn("dummy_array", "w") - self.assertEqual(len(output), 3) - self.assertEqual(output[0], "dummy_array") - self.assertEqual(output[1], 42) - self.assertEqual(output[2], {"test_kwarg": "test", "weights": "w"}) + assert len(output) == 3 + assert output[0] == "dummy_array" + assert output[1] == 42 + assert output[2] == {"test_kwarg": "test", "weights": "w"} def test_weights_in_kwargs(self): kwargs = {"test_kwarg": "test", "weights": "ignored"} @@ -1608,16 +1719,16 @@ def test_weights_in_kwargs(self): self.aggregator_fn, 42, **kwargs ) output = aggregator_fn("dummy_array", "w") - self.assertEqual(len(output), 3) - self.assertEqual(output[0], "dummy_array") - self.assertEqual(output[1], 42) - self.assertEqual(output[2], {"test_kwarg": "test", "weights": "w"}) - self.assertEqual(kwargs, {"test_kwarg": "test", "weights": "ignored"}) + assert len(output) == 3 + assert output[0] == "dummy_array" + assert output[1] == 42 + assert output[2] == {"test_kwarg": "test", "weights": "w"} + assert kwargs == {"test_kwarg": "test", "weights": "ignored"} class TestWeights: @pytest.fixture(autouse=True) - def setup_test_data(self): + def _setup_test_data(self): self.array_lib = np self.target_type = np.ndarray self.create_test_data() @@ -1672,28 +1783,28 @@ def test_init_with_str_dim_coord(self): # DimCoord always realizes points assert isinstance(weights.array, np.ndarray) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, [[0, 0, 0], [1, 1, 1]]) + _shared_utils.assert_array_equal(weights.array, [[0, 0, 0], [1, 1, 1]]) assert weights.units == "degrees" def test_init_with_str_aux_coord(self): weights = _Weights("auxcoord", self.cube) assert isinstance(weights.array, self.target_type) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) + _shared_utils.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) assert weights.units == "s" def test_init_with_str_ancillary_variable(self): weights = _Weights("ancvar", self.cube) assert isinstance(weights.array, 
self.target_type) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) + _shared_utils.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) assert weights.units == "kg" def test_init_with_str_cell_measure(self): weights = _Weights("cell_area", self.cube) assert isinstance(weights.array, self.target_type) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, self.data) + _shared_utils.assert_array_equal(weights.array, self.data) assert weights.units == "m2" def test_init_with_dim_coord(self): @@ -1701,28 +1812,28 @@ def test_init_with_dim_coord(self): # DimCoord always realizes points assert isinstance(weights.array, np.ndarray) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, [[0, 0, 0], [1, 1, 1]]) + _shared_utils.assert_array_equal(weights.array, [[0, 0, 0], [1, 1, 1]]) assert weights.units == "degrees" def test_init_with_aux_coord(self): weights = _Weights(self.aux_coord, self.cube) assert isinstance(weights.array, self.target_type) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) + _shared_utils.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) assert weights.units == "s" def test_init_with_ancillary_variable(self): weights = _Weights(self.ancillary_variable, self.cube) assert isinstance(weights.array, self.target_type) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) + _shared_utils.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) assert weights.units == "kg" def test_init_with_cell_measure(self): weights = _Weights(self.cell_measure, self.cube) assert isinstance(weights.array, self.target_type) assert isinstance(weights.units, cf_units.Unit) - np.testing.assert_array_equal(weights.array, self.data) + _shared_utils.assert_array_equal(weights.array, self.data) assert weights.units == "m2" def test_init_with_list(self): @@ -1738,7 +1849,7 @@ class TestWeightsLazy(TestWeights): """Repeat tests from ``TestWeights`` with lazy arrays.""" @pytest.fixture(autouse=True) - def setup_test_data(self): + def _setup_test_data(self): self.array_lib = da self.target_type = da.core.Array self.create_test_data() @@ -1756,7 +1867,7 @@ def test__Groupby_repr(): @pytest.mark.parametrize( - "kwargs,expected", + ("kwargs", "expected"), [ ({}, "kg m-2"), ({"test": "m"}, "kg m-2"), @@ -1785,7 +1896,3 @@ def test_sum_units_func(kwargs, expected): # changed if the units have not changed (even when weights units are "1") if result == units: assert result.origin == expected - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 70c1077def..74e0f90d8e 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -3,12 +3,8 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
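[Editor's aside - a minimal sketch, with hypothetical names and data, of the fixture-override pattern used by TestWeights / TestWeightsLazy above: the lazy subclass redefines the autouse setup fixture under the same name, so every inherited test is collected again and runs against dask arrays instead of numpy arrays.]

    import dask.array as da
    import numpy as np
    import pytest


    class TestEager:  # hypothetical
        @pytest.fixture(autouse=True)
        def _setup_data(self):
            self.data = np.ones((2, 3))

        def test_sum(self):
            # Works for both eager and lazy data: float() realises a dask scalar.
            assert float(self.data.sum()) == 6.0


    class TestLazy(TestEager):  # hypothetical
        # Same fixture name shadows the parent's setup for tests collected here;
        # the inherited test_sum now runs against a lazy array.
        @pytest.fixture(autouse=True)
        def _setup_data(self):
            self.data = da.ones((2, 3))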
-# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip - -import unittest - import numpy as np +import pytest import iris import iris.analysis.calculus @@ -16,18 +12,19 @@ import iris.coords from iris.coords import DimCoord import iris.cube +from iris.tests import _shared_utils import iris.tests.stock -class TestCubeDelta(tests.IrisTest): - @tests.skip_data +class TestCubeDelta: + @_shared_utils.skip_data def test_invalid(self): cube = iris.tests.stock.realistic_4d() - with self.assertRaises(iris.exceptions.CoordinateMultiDimError): + with pytest.raises(iris.exceptions.CoordinateMultiDimError): _ = iris.analysis.calculus.cube_delta(cube, "surface_altitude") - with self.assertRaises(iris.exceptions.CoordinateMultiDimError): + with pytest.raises(iris.exceptions.CoordinateMultiDimError): _ = iris.analysis.calculus.cube_delta(cube, "altitude") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _ = iris.analysis.calculus.cube_delta(cube, "forecast_period") def test_delta_coord_lookup(self): @@ -44,13 +41,13 @@ def test_delta_coord_lookup(self): cube.add_dim_coord(coord, 0) delta = iris.analysis.calculus.cube_delta(cube, "projection_x_coordinate") delta_coord = delta.coord("projection_x_coordinate") - self.assertEqual(delta_coord, delta.coord(coord)) - self.assertEqual(coord, cube.coord(delta_coord)) + assert delta_coord == delta.coord(coord) + assert coord == cube.coord(delta_coord) -class TestDeltaAndMidpoint(tests.IrisTest): +class TestDeltaAndMidpoint: def _simple_filename(self, suffix): - return tests.get_result_path( + return _shared_utils.get_result_path( ("analysis", "delta_and_midpoint", "simple%s.cml" % suffix) ) @@ -61,13 +58,13 @@ def test_simple1_delta_midpoint(self): units="degrees", circular=True, ) - self.assertXMLElement(a, self._simple_filename("1")) + _shared_utils.assert_XML_element(a, self._simple_filename("1")) delta = iris.analysis.calculus._construct_delta_coord(a) - self.assertXMLElement(delta, self._simple_filename("1_delta")) + _shared_utils.assert_XML_element(delta, self._simple_filename("1_delta")) midpoint = iris.analysis.calculus._construct_midpoint_coord(a) - self.assertXMLElement(midpoint, self._simple_filename("1_midpoint")) + _shared_utils.assert_XML_element(midpoint, self._simple_filename("1_midpoint")) def test_simple2_delta_midpoint(self): a = iris.coords.DimCoord( @@ -76,13 +73,13 @@ def test_simple2_delta_midpoint(self): units="degrees", circular=True, ) - self.assertXMLElement(a, self._simple_filename("2")) + _shared_utils.assert_XML_element(a, self._simple_filename("2")) delta = iris.analysis.calculus._construct_delta_coord(a) - self.assertXMLElement(delta, self._simple_filename("2_delta")) + _shared_utils.assert_XML_element(delta, self._simple_filename("2_delta")) midpoint = iris.analysis.calculus._construct_midpoint_coord(a) - self.assertXMLElement(midpoint, self._simple_filename("2_midpoint")) + _shared_utils.assert_XML_element(midpoint, self._simple_filename("2_midpoint")) def test_simple3_delta_midpoint(self): a = iris.coords.DimCoord( @@ -92,13 +89,13 @@ def test_simple3_delta_midpoint(self): circular=True, ) a.guess_bounds(0.5) - self.assertXMLElement(a, self._simple_filename("3")) + _shared_utils.assert_XML_element(a, self._simple_filename("3")) delta = iris.analysis.calculus._construct_delta_coord(a) - self.assertXMLElement(delta, self._simple_filename("3_delta")) + _shared_utils.assert_XML_element(delta, self._simple_filename("3_delta")) 
midpoint = iris.analysis.calculus._construct_midpoint_coord(a) - self.assertXMLElement(midpoint, self._simple_filename("3_midpoint")) + _shared_utils.assert_XML_element(midpoint, self._simple_filename("3_midpoint")) def test_simple4_delta_midpoint(self): a = iris.coords.AuxCoord( @@ -108,13 +105,13 @@ def test_simple4_delta_midpoint(self): ) a.guess_bounds() b = a.copy() - self.assertXMLElement(b, self._simple_filename("4")) + _shared_utils.assert_XML_element(b, self._simple_filename("4")) delta = iris.analysis.calculus._construct_delta_coord(b) - self.assertXMLElement(delta, self._simple_filename("4_delta")) + _shared_utils.assert_XML_element(delta, self._simple_filename("4_delta")) midpoint = iris.analysis.calculus._construct_midpoint_coord(b) - self.assertXMLElement(midpoint, self._simple_filename("4_midpoint")) + _shared_utils.assert_XML_element(midpoint, self._simple_filename("4_midpoint")) def test_simple5_not_degrees_delta_midpoint(self): # Not sure it makes sense to have a circular coordinate which does not have a modulus but test it anyway. @@ -124,13 +121,13 @@ def test_simple5_not_degrees_delta_midpoint(self): units="meter", circular=True, ) - self.assertXMLElement(a, self._simple_filename("5")) + _shared_utils.assert_XML_element(a, self._simple_filename("5")) delta = iris.analysis.calculus._construct_delta_coord(a) - self.assertXMLElement(delta, self._simple_filename("5_delta")) + _shared_utils.assert_XML_element(delta, self._simple_filename("5_delta")) midpoints = iris.analysis.calculus._construct_midpoint_coord(a) - self.assertXMLElement(midpoints, self._simple_filename("5_midpoint")) + _shared_utils.assert_XML_element(midpoints, self._simple_filename("5_midpoint")) def test_simple6_delta_midpoint(self): a = iris.coords.DimCoord( @@ -140,7 +137,7 @@ def test_simple6_delta_midpoint(self): circular=True, ) midpoints = iris.analysis.calculus._construct_midpoint_coord(a) - self.assertXMLElement(midpoints, self._simple_filename("6")) + _shared_utils.assert_XML_element(midpoints, self._simple_filename("6")) def test_singular_delta(self): # Test single valued coordinate mid-points when circular @@ -149,7 +146,7 @@ def test_singular_delta(self): ) r_expl = iris.analysis.calculus._construct_delta_coord(lon) - self.assertXMLElement( + _shared_utils.assert_XML_element( r_expl, ( "analysis", @@ -160,7 +157,7 @@ def test_singular_delta(self): # Test single valued coordinate mid-points when not circular lon.circular = False - with self.assertRaises(ValueError): + with pytest.raises(ValueError): iris.analysis.calculus._construct_delta_coord(lon) def test_singular_midpoint(self): @@ -170,7 +167,7 @@ def test_singular_midpoint(self): ) r_expl = iris.analysis.calculus._construct_midpoint_coord(lon) - self.assertXMLElement( + _shared_utils.assert_XML_element( r_expl, ( "analysis", @@ -181,12 +178,13 @@ def test_singular_midpoint(self): # Test single valued coordinate mid-points when not circular lon.circular = False - with self.assertRaises(ValueError): + with pytest.raises(ValueError): iris.analysis.calculus._construct_midpoint_coord(lon) -class TestCoordTrig(tests.IrisTest): - def setUp(self): +class TestCoordTrig: + @pytest.fixture(autouse=True) + def _setup(self): points = np.arange(20, dtype=np.float32) * 2.3 bounds = np.concatenate([[points - 0.5 * 2.3], [points + 0.5 * 2.3]]).T self.lat = iris.coords.AuxCoord( @@ -204,41 +202,41 @@ def test_sin(self): sin_of_coord_radians = iris.analysis.calculus._coord_sin(self.rlat) # Check the values are correct (within a tolerance) - 
np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( np.sin(self.rlat.points), sin_of_coord.points ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( np.sin(self.rlat.bounds), sin_of_coord.bounds ) # Check that the results of the sin function are almost equal when operating on a coord with degrees and radians - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( sin_of_coord.points, sin_of_coord_radians.points ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( sin_of_coord.bounds, sin_of_coord_radians.bounds ) - self.assertEqual(sin_of_coord.name(), "sin(latitude)") - self.assertEqual(sin_of_coord.units, "1") + assert sin_of_coord.name() == "sin(latitude)" + assert sin_of_coord.units == "1" def test_cos(self): cos_of_coord = iris.analysis.calculus._coord_cos(self.lat) cos_of_coord_radians = iris.analysis.calculus._coord_cos(self.rlat) # Check the values are correct (within a tolerance) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( np.cos(self.rlat.points), cos_of_coord.points ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( np.cos(self.rlat.bounds), cos_of_coord.bounds ) # Check that the results of the cos function are almost equal when operating on a coord with degrees and radians - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( cos_of_coord.points, cos_of_coord_radians.points ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( cos_of_coord.bounds, cos_of_coord_radians.bounds ) @@ -248,15 +246,19 @@ def test_cos(self): points=np.array([1], dtype=np.float32) ) - self.assertXMLElement(cos_of_coord, ("analysis", "calculus", "cos_simple.xml")) - self.assertXMLElement( + _shared_utils.assert_XML_element( + cos_of_coord, ("analysis", "calculus", "cos_simple.xml") + ) + _shared_utils.assert_XML_element( cos_of_coord_radians, ("analysis", "calculus", "cos_simple_radians.xml"), ) -class TestCalculusSimple3(tests.IrisTest): - def setUp(self): +class TestCalculusSimple3: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request data = np.arange(2500, dtype=np.float32).reshape(50, 50) cube = iris.cube.Cube(data, standard_name="x_wind", units="km/h") @@ -285,15 +287,21 @@ def setUp(self): def test_diff_wrt_lon(self): t = iris.analysis.calculus.differentiate(self.cube, "longitude") - self.assertCMLApproxData(t, ("analysis", "calculus", "handmade2_wrt_lon.cml")) + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade2_wrt_lon.cml") + ) def test_diff_wrt_lat(self): t = iris.analysis.calculus.differentiate(self.cube, "latitude") - self.assertCMLApproxData(t, ("analysis", "calculus", "handmade2_wrt_lat.cml")) + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade2_wrt_lat.cml") + ) -class TestCalculusSimple2(tests.IrisTest): - def setUp(self): +class TestCalculusSimple2: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request data = np.array( [ [1, 2, 3, 4, 5], @@ -344,47 +352,57 @@ def setUp(self): def test_diff_wrt_x(self): t = iris.analysis.calculus.differentiate(self.cube, "x") - self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_x.cml")) + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade_wrt_x.cml") + ) def test_diff_wrt_y(self): t = 
iris.analysis.calculus.differentiate(self.cube, "y") - self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_y.cml")) + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade_wrt_y.cml") + ) def test_diff_wrt_lon(self): t = iris.analysis.calculus.differentiate(self.cube, "longitude") - self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_lon.cml")) + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade_wrt_lon.cml") + ) def test_diff_wrt_lat(self): t = iris.analysis.calculus.differentiate(self.cube, "latitude") - self.assertCMLApproxData(t, ("analysis", "calculus", "handmade_wrt_lat.cml")) + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade_wrt_lat.cml") + ) def test_delta_wrt_x(self): t = iris.analysis.calculus.cube_delta(self.cube, "x") - self.assertCMLApproxData( - t, ("analysis", "calculus", "delta_handmade_wrt_x.cml") + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "delta_handmade_wrt_x.cml") ) def test_delta_wrt_y(self): t = iris.analysis.calculus.cube_delta(self.cube, "y") - self.assertCMLApproxData( - t, ("analysis", "calculus", "delta_handmade_wrt_y.cml") + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "delta_handmade_wrt_y.cml") ) def test_delta_wrt_lon(self): t = iris.analysis.calculus.cube_delta(self.cube, "longitude") - self.assertCMLApproxData( - t, ("analysis", "calculus", "delta_handmade_wrt_lon.cml") + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "delta_handmade_wrt_lon.cml") ) def test_delta_wrt_lat(self): t = iris.analysis.calculus.cube_delta(self.cube, "latitude") - self.assertCMLApproxData( - t, ("analysis", "calculus", "delta_handmade_wrt_lat.cml") + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "delta_handmade_wrt_lat.cml") ) -class TestCalculusSimple1(tests.IrisTest): - def setUp(self): +class TestCalculusSimple1: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request data = np.array( [ [1, 2, 3, 4, 5], @@ -410,14 +428,14 @@ def setUp(self): def test_diff_wrt_x(self): t = iris.analysis.calculus.differentiate(self.cube, "x") - self.assertCMLApproxData( - t, ("analysis", "calculus", "handmade_simple_wrt_x.cml") + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "handmade_simple_wrt_x.cml") ) def test_delta_wrt_x(self): t = iris.analysis.calculus.cube_delta(self.cube, "x") - self.assertCMLApproxData( - t, ("analysis", "calculus", "delta_handmade_simple_wrt_x.cml") + _shared_utils.assert_CML_approx_data( + self.request, t, ("analysis", "calculus", "delta_handmade_simple_wrt_x.cml") ) @@ -501,7 +519,11 @@ def build_cube(data, spherical=False): return cube -class TestCalculusWKnownSolutions(tests.IrisTest): +class TestCalculusWKnownSolutions: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + def get_coord_pts(self, cube): """Return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones) for the given cube.""" x = cube.coord(axis="X") @@ -568,7 +590,7 @@ def test_contrived_differential1(self): data = -sin_x_pts * y_ones result = df_dlon.copy(data=data) - np.testing.assert_array_almost_equal(result.data, df_dlon.data, decimal=3) + _shared_utils.assert_array_almost_equal(result.data, df_dlon.data, decimal=3) def test_contrived_differential2(self): # testing : @@ -585,7 +607,7 @@ def 
test_contrived_differential2(self): x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(r) result = r.copy(data=y_pts * 2.0 * x_ones * z_ones) - np.testing.assert_array_almost_equal(result.data, r.data, decimal=6) + _shared_utils.assert_array_almost_equal(result.data, r.data, decimal=6) def test_contrived_non_spherical_curl1(self): # testing : @@ -604,15 +626,16 @@ def test_contrived_non_spherical_curl1(self): r = iris.analysis.calculus.curl(u, v) # Curl returns None when there is no components of Curl - self.assertEqual(r[0], None) - self.assertEqual(r[1], None) + assert r[0] is None + assert r[1] is None cube = r[2] - self.assertCML( + _shared_utils.assert_CML( + self.request, cube, ("analysis", "calculus", "grad_contrived_non_spherical1.cml"), checksum=False, ) - self.assertTrue(np.all(np.abs(cube.data - (-1.0)) < 1.0e-7)) + assert np.all(np.abs(cube.data - (-1.0)) < 1.0e-7) def test_contrived_non_spherical_curl2(self): # testing : @@ -639,18 +662,19 @@ def test_contrived_non_spherical_curl2(self): # result.data = y_pts * 2. * x_ones * z_ones # print(repr(r[0].data[0:1, 0:5, 0:25:5])) # print(repr(result.data[0:1, 0:5, 0:25:5])) - # np.testing.assert_array_almost_equal(result.data, r[0].data, decimal=2) + # _shared_utils.assert_array_almost_equal(result.data, r[0].data, decimal=2) # # result = r[1].copy(data=True) # x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(result) # result.data = pow(z_pts, 2) * x_ones * y_ones - # np.testing.assert_array_almost_equal(result.data, r[1].data, decimal=6) + # _shared_utils.assert_array_almost_equal(result.data, r[1].data, decimal=6) result = r[2].copy() result.data = result.data * 0 + 1 - np.testing.assert_array_almost_equal(result.data, r[2].data, decimal=4) + _shared_utils.assert_array_almost_equal(result.data, r[2].data, decimal=4) - self.assertCML( + _shared_utils.assert_CML( + self.request, r, ("analysis", "calculus", "curl_contrived_cartesian2.cml"), checksum=False, @@ -681,11 +705,14 @@ def test_contrived_spherical_curl1(self): result = r.copy(data=r.data * 0) # Note: This numerical comparison was created when the radius was 1000 times smaller - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( result.data[5:-5], r.data[5:-5] / 1000.0, decimal=1 ) - self.assertCML( - r, ("analysis", "calculus", "grad_contrived1.cml"), checksum=False + _shared_utils.assert_CML( + self.request, + r, + ("analysis", "calculus", "grad_contrived1.cml"), + checksum=False, ) def test_contrived_spherical_curl2(self): @@ -730,22 +757,25 @@ def test_contrived_spherical_curl2(self): result = r.copy(data=-2 * cos_x_pts * cos_y_pts) # Note: This numerical comparison was created when the radius was 1000 times smaller - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( result.data[30:-30, :], r.data[30:-30, :] / 1000.0, decimal=1 ) - self.assertCML( - r, ("analysis", "calculus", "grad_contrived2.cml"), checksum=False + _shared_utils.assert_CML( + self.request, + r, + ("analysis", "calculus", "grad_contrived2.cml"), + checksum=False, ) -class TestCurlInterface(tests.IrisTest): +class TestCurlInterface: def test_non_conformed(self): u = build_cube(np.empty((50, 20)), spherical=True) v = u.copy() y = v.coord("latitude") y.points = y.points + 5 - self.assertRaises(ValueError, iris.analysis.calculus.curl, u, v) + pytest.raises(ValueError, iris.analysis.calculus.curl, u, v) def test_standard_name(self): nx = 20 @@ -758,26 +788,26 @@ def test_standard_name(self): w.rename("w_wind") r = 
iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v) - self.assertEqual(r, (("u", "v", "w"), "wind")) + assert r == (("u", "v", "w"), "wind") r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w) - self.assertEqual(r, (("u", "v", "w"), "wind")) + assert r == (("u", "v", "w"), "wind") - self.assertRaises( + pytest.raises( ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, None, w, ) - self.assertRaises( + pytest.raises( ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, None, None, w, ) - self.assertRaises( + pytest.raises( ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, None, @@ -789,22 +819,22 @@ def test_standard_name(self): v.rename("y foobar wibble") w.rename("z foobar wibble") r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v) - self.assertEqual(r, (("x", "y", "z"), "foobar wibble")) + assert r == (("x", "y", "z"), "foobar wibble") r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w) - self.assertEqual(r, (("x", "y", "z"), "foobar wibble")) + assert r == (("x", "y", "z"), "foobar wibble") u.rename("wibble foobar") v.rename("wobble foobar") w.rename("tipple foobar") # r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w) #should raise a Value Error... - self.assertRaises( + pytest.raises( ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, v, ) - self.assertRaises( + pytest.raises( ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, @@ -816,14 +846,14 @@ def test_standard_name(self): v.rename("northward_foobar") w.rename("upward_foobar") r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v) - self.assertEqual(r, (("eastward", "northward", "upward"), "foobar")) + assert r == (("eastward", "northward", "upward"), "foobar") r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w) - self.assertEqual(r, (("eastward", "northward", "upward"), "foobar")) + assert r == (("eastward", "northward", "upward"), "foobar") # Change it to have an inconsistent phenomenon v.rename("northward_foobar2") - self.assertRaises( + pytest.raises( ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, @@ -837,8 +867,4 @@ def test_rotated_pole(self): v.rename("v_wind") x, y, z = iris.analysis.calculus.curl(u, v) - self.assertEqual(z.coord_system(), u.coord_system()) - - -if __name__ == "__main__": - unittest.main() + assert z.coord_system() == u.coord_system() diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py index 908ed90bf8..16e2799f1d 100644 --- a/lib/iris/tests/test_lazy_aggregate_by.py +++ b/lib/iris/tests/test_lazy_aggregate_by.py @@ -2,18 +2,19 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -import unittest +import pytest from iris._lazy_data import as_lazy_data from iris.analysis import SUM +from iris.cube import Cube from iris.tests import test_aggregate_by # Simply redo the tests of test_aggregate_by.py with lazy data class TestLazyAggregateBy(test_aggregate_by.TestAggregateBy): - def setUp(self): - super().setUp() - + @pytest.fixture(autouse=True) + def _setup_subclass(self, _setup): + # Requests _setup to ensure this fixture runs AFTER _setup. 
self.cube_single.data = as_lazy_data(self.cube_single.data) self.cube_multi.data = as_lazy_data(self.cube_multi.data) self.cube_single_masked.data = as_lazy_data(self.cube_single_masked.data) @@ -21,28 +22,46 @@ def setUp(self): self.cube_easy.data = as_lazy_data(self.cube_easy.data) self.cube_easy_weighted.data = as_lazy_data(self.cube_easy_weighted.data) - assert self.cube_single.has_lazy_data() - assert self.cube_multi.has_lazy_data() - assert self.cube_single_masked.has_lazy_data() - assert self.cube_multi_masked.has_lazy_data() - assert self.cube_easy.has_lazy_data() - assert self.cube_easy_weighted.has_lazy_data() - - def tearDown(self): - super().tearDown() + @pytest.fixture(autouse=True) + def _lazy_checks(self, _setup_subclass): + # Requests _setup_subclass to ensure this fixture runs AFTER _setup_subclass. + # TODO: ASSERTS IN FIXTURES ARE AN ANTIPATTERN, find an alternative. + # https://github.com/m-burst/flake8-pytest-style/issues/31 + # (have given this a few hours without success, something to revisit). + def _checker(cubes: list[Cube]): + for cube in cubes: + assert cube.has_lazy_data() + + _checker( + [ + self.cube_single, + self.cube_multi, + self.cube_single_masked, + self.cube_multi_masked, + self.cube_easy, + self.cube_easy_weighted, + ] + ) - # Note: weighted easy cube is not expected to have lazy data since - # WPERCENTILE is not lazy. - assert self.cube_single.has_lazy_data() - assert self.cube_multi.has_lazy_data() - assert self.cube_single_masked.has_lazy_data() - assert self.cube_multi_masked.has_lazy_data() - assert self.cube_easy.has_lazy_data() + yield + + _checker( + [ + self.cube_single, + self.cube_multi, + self.cube_single_masked, + self.cube_multi_masked, + self.cube_easy, + # Note: weighted easy cube is not expected to have lazy data since + # WPERCENTILE is not lazy. + ] + ) class TestLazyAggregateByWeightedByCube(TestLazyAggregateBy): - def setUp(self): - super().setUp() + @pytest.fixture(autouse=True) + def _setup_sub2(self, _setup_subclass): + # Requests _setup_subclass to ensure this fixture runs AFTER _setup_subclass. 
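[Editor's aside - a minimal sketch of the two fixture techniques the comments above describe: requesting one autouse fixture from another pins their execution order, and a yield fixture can check state both before and after each test in place of setUp/tearDown assertions. The class, names, and data below are hypothetical, not the patch's code.]

    import dask.array as da
    import pytest


    class TestOrdering:  # hypothetical
        @pytest.fixture(autouse=True)
        def _base_setup(self):
            self.payload = da.arange(6)

        @pytest.fixture(autouse=True)
        def _derived_setup(self, _base_setup):
            # Naming _base_setup as a parameter guarantees it has already run.
            self.payload = self.payload.reshape(2, 3)

        @pytest.fixture(autouse=True)
        def _laziness_guard(self, _derived_setup):
            # Pre-test check ...
            assert isinstance(self.payload, da.Array)
            yield
            # ... and post-test check, mirroring the old setUp/tearDown asserts.
            assert isinstance(self.payload, da.Array)

        def test_shape(self):
            assert self.payload.shape == (2, 3)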
self.weights_single = self.cube_single[:, 0, 0].copy(self.weights_single) self.weights_single.units = "m2" @@ -55,7 +74,7 @@ def test_str_aggregation_weighted_sum_single(self): SUM, weights=self.weights_single, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_coord_aggregation_weighted_sum_single(self): aggregateby_cube = self.cube_single.aggregated_by( @@ -63,7 +82,7 @@ def test_coord_aggregation_weighted_sum_single(self): SUM, weights=self.weights_single, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_str_aggregation_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -71,7 +90,7 @@ def test_str_aggregation_weighted_sum_multi(self): SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_str_aggregation_rev_order_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -79,7 +98,7 @@ def test_str_aggregation_rev_order_weighted_sum_multi(self): SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_coord_aggregation_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -87,7 +106,7 @@ def test_coord_aggregation_weighted_sum_multi(self): SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") + assert aggregateby_cube.units == "kelvin m2" def test_coord_aggregation_rev_order_weighted_sum_multi(self): aggregateby_cube = self.cube_multi.aggregated_by( @@ -95,8 +114,4 @@ def test_coord_aggregation_rev_order_weighted_sum_multi(self): SUM, weights=self.weights_multi, ) - self.assertEqual(aggregateby_cube.units, "kelvin m2") - - -if __name__ == "__main__": - unittest.main() + assert aggregateby_cube.units == "kelvin m2" diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index 4f59bf8d31..2c3c2fc0a0 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -4,21 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. """Tests map creation.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import cartopy.crs as ccrs import numpy as np -import numpy.testing as np_testing +import pytest import iris import iris.coord_systems import iris.cube +from iris.tests import _shared_utils import iris.tests.stock # Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import matplotlib.pyplot as plt import iris.plot as iplt @@ -30,11 +27,11 @@ ) -@tests.skip_plot -@tests.skip_data -class TestBasic(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_plot +@_shared_utils.skip_data +class TestBasic(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = iris.tests.stock.realistic_4d() def test_contourf(self): @@ -54,23 +51,22 @@ def test_unmappable(self): self.check_graphic() def test_default_projection_and_extent(self): - self.assertEqual( - iplt.default_projection(self.cube), - ccrs.RotatedPole(357.5 - 180, 37.5, globe=_DEFAULT_GLOBE), + assert iplt.default_projection(self.cube) == ccrs.RotatedPole( + 357.5 - 180, 37.5, globe=_DEFAULT_GLOBE ) - np_testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( iplt.default_projection_extent(self.cube), (3.59579163e02, 3.59669159e02, -1.28250003e-01, -3.82499993e-02), decimal=3, ) -@tests.skip_data -@tests.skip_plot -class TestUnmappable(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestUnmappable(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): src_cube = iris.tests.stock.global_pp() # Make a cube that can't be located on the globe. @@ -96,12 +92,14 @@ def test_simple(self): self.check_graphic() -@tests.skip_data -@tests.skip_plot -class TestMappingSubRegion(tests.GraphicsTest): - def setUp(self): - super().setUp() - cube_path = tests.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp")) +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestMappingSubRegion(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): + cube_path = _shared_utils.get_data_path( + ("PP", "aPProt1", "rotatedMHtimecube.pp") + ) cube = iris.load_cube(cube_path)[0] # make the data smaller to speed things up. 
self.cube = cube[::10, ::10] @@ -135,22 +133,21 @@ def test_simple(self): self.check_graphic() def test_default_projection_and_extent(self): - self.assertEqual( - iplt.default_projection(self.cube), - ccrs.RotatedPole(357.5 - 180, 37.5, globe=_DEFAULT_GLOBE), + assert iplt.default_projection(self.cube) == ccrs.RotatedPole( + 357.5 - 180, 37.5, globe=_DEFAULT_GLOBE ) - np_testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( iplt.default_projection_extent(self.cube), (313.01998901, 391.11999512, -22.48999977, 24.80999947), ) -@tests.skip_data -@tests.skip_plot -class TestLowLevel(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestLowLevel(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = iris.tests.stock.global_pp() self.few = 4 self.few_levels = list(range(280, 300, 5)) @@ -178,11 +175,11 @@ def test_keywords(self): self.check_graphic() -@tests.skip_data -@tests.skip_plot -class TestBoundedCube(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestBoundedCube(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = iris.tests.stock.global_pp() # Add some bounds to this data (this will actually make the bounds # invalid as they will straddle the north pole and overlap on the @@ -202,28 +199,25 @@ def test_grid(self): self.check_graphic() def test_default_projection_and_extent(self): - self.assertEqual( - iplt.default_projection(self.cube), - ccrs.PlateCarree( - globe=self.cube.coord_system("CoordSystem").as_cartopy_globe() - ), + assert iplt.default_projection(self.cube) == ccrs.PlateCarree( + globe=self.cube.coord_system("CoordSystem").as_cartopy_globe() ) - np_testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( iplt.default_projection_extent(self.cube), [0.0, 360.0, -89.99995422, 89.99998474], ) - np_testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( iplt.default_projection_extent(self.cube, mode=iris.coords.BOUND_MODE), [-1.875046, 358.124954, -90, 90], ) -@tests.skip_data -@tests.skip_plot -class TestLimitedAreaCube(tests.GraphicsTest): - def setUp(self): - super().setUp() - cube_path = tests.get_data_path(("PP", "aPProt1", "rotated.pp")) +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestLimitedAreaCube(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): + cube_path = _shared_utils.get_data_path(("PP", "aPProt1", "rotated.pp")) self.cube = iris.load_cube(cube_path)[::20, ::20] self.cube.coord("grid_latitude").guess_bounds() self.cube.coord("grid_longitude").guess_bounds() @@ -240,7 +234,3 @@ def test_scatter(self): iplt.points(self.cube) plt.gca().coastlines("110m") self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index b263313b90..f68a9cf32a 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -3,24 +3,20 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from functools import wraps -import types -import warnings +from contextlib import nullcontext import cf_units import numpy as np +import pytest import iris import iris.analysis import iris.coords as coords +from iris.tests import _shared_utils import iris.tests.stock # Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import matplotlib.pyplot as plt import iris.plot as iplt @@ -28,7 +24,7 @@ import iris.symbols -@tests.skip_data +@_shared_utils.skip_data def simple_cube(): cube = iris.tests.stock.realistic_4d() cube = cube[:, 0, 0, :] @@ -36,8 +32,8 @@ def simple_cube(): return cube -@tests.skip_plot -class TestSimple(tests.GraphicsTest): +@_shared_utils.skip_plot +class TestSimple(_shared_utils.GraphicsTest): def test_points(self): cube = simple_cube() qplt.contourf(cube) @@ -49,8 +45,8 @@ def test_bounds(self): self.check_graphic() -@tests.skip_plot -class TestMissingCoord(tests.GraphicsTest): +@_shared_utils.skip_plot +class TestMissingCoord(_shared_utils.GraphicsTest): def _check(self, cube): qplt.contourf(cube) self.check_graphic() @@ -75,12 +71,12 @@ def test_none(self): self._check(cube) -@tests.skip_data -@tests.skip_plot -class TestMissingCS(tests.GraphicsTest): - @tests.skip_data +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestMissingCS(_shared_utils.GraphicsTest): + @_shared_utils.skip_data def test_missing_cs(self): - cube = tests.stock.simple_pp() + cube = iris.tests.stock.simple_pp() cube.coord("latitude").coord_system = None cube.coord("longitude").coord_system = None qplt.contourf(cube) @@ -88,11 +84,11 @@ def test_missing_cs(self): self.check_graphic() -@tests.skip_plot -@tests.skip_data -class TestHybridHeight(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_plot +@_shared_utils.skip_data +class TestHybridHeight(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = iris.tests.stock.realistic_4d()[0, :15, 0, :] def _check(self, plt_method, test_altitude=True): @@ -131,23 +127,25 @@ def test_orography(self): self.check_graphic() # TODO: Test bounds once they are supported. 
- with self.assertRaises(NotImplementedError): - qplt.pcolor(self.cube) + qplt.pcolor(self.cube) + with pytest.raises(NotImplementedError): iplt.orography_at_bounds(self.cube) - iplt.outline(self.cube) - self.check_graphic() + # iplt.outline(self.cube) + # self.check_graphic() -@tests.skip_plot -@tests.skip_data -class Test1dPlotMultiArgs(tests.GraphicsTest): +@_shared_utils.skip_plot +@_shared_utils.skip_data +class Test1dPlotMultiArgs(_shared_utils.GraphicsTest): # tests for iris.plot using multi-argument calling convention - - def setUp(self): - super().setUp() - self.cube1d = _load_4d_testcube()[0, :, 0, 0] + @pytest.fixture(autouse=True) + def _set_draw_method(self): self.draw_method = iplt.plot + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.cube1d = load_4d_testcube[0, :, 0, 0] + def test_cube(self): # just plot a cube against its dim coord self.draw_method(self.cube1d) # altitude vs temp @@ -204,50 +202,51 @@ def test_cube_cube(self): def test_incompatible_objects(self): # incompatible objects (not the same length) should raise an error - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="are not compatible"): self.draw_method(self.cube1d.coord("time"), (self.cube1d)) - def test_multimidmensional(self): + def test_multimidmensional(self, load_4d_testcube): # multidimensional cubes are not allowed - cube = _load_4d_testcube()[0, :, :, 0] - with self.assertRaises(ValueError): + cube = load_4d_testcube[0, :, :, 0] + with pytest.raises(ValueError, match="must be 1-dimensional"): self.draw_method(cube) def test_not_cube_or_coord(self): # inputs must be cubes or coordinates, otherwise an error should be # raised xdim = np.arange(self.cube1d.shape[0]) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.draw_method(xdim, self.cube1d) def test_plot_old_coords_kwarg(self): # Coords used to be a valid kwarg to plot, but it was deprecated and # we are maintaining a reasonable exception, check that it is raised # here. 
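[Editor's aside - a minimal sketch of the assertRaises -> pytest.raises conversion applied in these hunks, including the added match= argument, which applies a regular-expression search to the exception message. The helper function below is hypothetical, not an iris.plot API.]

    import numpy as np
    import pytest


    def _require_1d(array):  # hypothetical helper, for illustration only
        if array.ndim != 1:
            raise ValueError("inputs are not compatible: expected 1-dimensional data")


    def test_require_1d_rejects_2d():
        # pytest.raises replaces self.assertRaises; match= checks the message,
        # as in the added match="are not compatible" arguments above.
        with pytest.raises(ValueError, match="are not compatible"):
            _require_1d(np.zeros((2, 2)))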
- with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.draw_method(self.cube1d, coords=None) -@tests.skip_plot +@_shared_utils.skip_plot class Test1dQuickplotPlotMultiArgs(Test1dPlotMultiArgs): # tests for iris.plot using multi-argument calling convention - - def setUp(self): - tests.GraphicsTest.setUp(self) - self.cube1d = _load_4d_testcube()[0, :, 0, 0] + @pytest.fixture(autouse=True) + def _set_draw_method(self): self.draw_method = qplt.plot -@tests.skip_data -@tests.skip_plot -class Test1dScatter(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class Test1dScatter(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _set_draw_method(self): + self.draw_method = iplt.scatter + + @pytest.fixture(autouse=True) + def _setup(self): self.cube = iris.load_cube( - tests.get_data_path(("NAME", "NAMEIII_trajectory.txt")), + _shared_utils.get_data_path(("NAME", "NAMEIII_trajectory.txt")), "Temperature", ) - self.draw_method = iplt.scatter def test_coord_coord(self): x = self.cube.coord("longitude") @@ -280,7 +279,7 @@ def test_cube_coord(self): def test_cube_cube(self): x = iris.load_cube( - tests.get_data_path(("NAME", "NAMEIII_trajectory.txt")), + _shared_utils.get_data_path(("NAME", "NAMEIII_trajectory.txt")), "Rel Humidity", ) y = self.cube @@ -292,42 +291,38 @@ def test_incompatible_objects(self): # cubes/coordinates of different sizes cannot be plotted x = self.cube y = self.cube.coord("altitude")[:-1] - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="are not compatible"): self.draw_method(x, y) - def test_multidimensional(self): + def test_multidimensional(self, load_4d_testcube): # multidimensional cubes/coordinates are not allowed - x = _load_4d_testcube()[0, :, :, 0] + x = load_4d_testcube[0, :, :, 0] y = x.coord("model_level_number") - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="must be 1-dimensional"): self.draw_method(x, y) def test_not_cube_or_coord(self): # inputs must be cubes or coordinates x = np.arange(self.cube.shape[0]) y = self.cube - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.draw_method(x, y) -@tests.skip_data -@tests.skip_plot +@_shared_utils.skip_data +@_shared_utils.skip_plot class Test1dQuickplotScatter(Test1dScatter): - def setUp(self): - tests.GraphicsTest.setUp(self) - self.cube = iris.load_cube( - tests.get_data_path(("NAME", "NAMEIII_trajectory.txt")), - "Temperature", - ) + @pytest.fixture(autouse=True) + def _set_draw_method(self): self.draw_method = qplt.scatter -@tests.skip_data -@tests.skip_plot -class Test2dPoints(tests.GraphicsTest): - def setUp(self): - super().setUp() - pp_file = tests.get_data_path(("PP", "globClim1", "u_wind.pp")) +@_shared_utils.skip_data +@_shared_utils.skip_plot +class Test2dPoints(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): + pp_file = _shared_utils.get_data_path(("PP", "globClim1", "u_wind.pp")) self.cube = iris.load(pp_file)[0][0] def test_circular_changes(self): @@ -339,16 +334,19 @@ def test_circular_changes(self): self.check_graphic() -@tests.skip_data -@tests.skip_plot -class Test1dFillBetween(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class Test1dFillBetween(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _set_draw_method(self): + self.draw_method = iplt.fill_between + + @pytest.fixture(autouse=True) + def _setup(self): 
self.cube = iris.load_cube( - tests.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")), + _shared_utils.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")), "air_potential_temperature", )[0, 0] - self.draw_method = iplt.fill_between def test_coord_coord(self): x = self.cube.coord("grid_latitude") @@ -383,7 +381,7 @@ def test_incompatible_objects_x_odd(self): x = self.cube.coord("grid_latitude")[:-1] y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="are not all compatible"): self.draw_method(x, y1, y2) def test_incompatible_objects_y1_odd(self): @@ -391,7 +389,7 @@ def test_incompatible_objects_y1_odd(self): x = self.cube.coord("grid_latitude") y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN)[:-1] y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="are not all compatible"): self.draw_method(x, y1, y2) def test_incompatible_objects_y2_odd(self): @@ -399,7 +397,7 @@ def test_incompatible_objects_y2_odd(self): x = self.cube.coord("grid_latitude") y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX)[:-1] - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="are not all compatible"): self.draw_method(x, y1, y2) def test_incompatible_objects_all_odd(self): @@ -407,7 +405,7 @@ def test_incompatible_objects_all_odd(self): x = self.cube.coord("grid_latitude") y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN)[:-1] y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX)[:-2] - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="are not all compatible"): self.draw_method(x, y1, y2) def test_multidimensional(self): @@ -415,7 +413,7 @@ def test_multidimensional(self): x = self.cube.coord("grid_latitude") y1 = self.cube y2 = self.cube - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="must be 1-dimensional"): self.draw_method(x, y1, y2) def test_not_cube_or_coord(self): @@ -423,64 +421,55 @@ def test_not_cube_or_coord(self): x = np.arange(self.cube.shape[0]) y1 = self.cube.collapsed("grid_longitude", iris.analysis.MIN) y2 = self.cube.collapsed("grid_longitude", iris.analysis.MAX) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.draw_method(x, y1, y2) -@tests.skip_data -@tests.skip_plot +@_shared_utils.skip_data +@_shared_utils.skip_plot class Test1dQuickplotFillBetween(Test1dFillBetween): - def setUp(self): - tests.GraphicsTest.setUp(self) - self.cube = iris.load_cube( - tests.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")), - "air_potential_temperature", - )[0, 0] + @pytest.fixture(autouse=True) + def _set_draw_method(self): self.draw_method = qplt.fill_between -@tests.skip_data -@tests.skip_plot -class TestAttributePositive(tests.GraphicsTest): +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestAttributePositive(_shared_utils.GraphicsTest): def test_1d_positive_up(self): - path = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) + path = _shared_utils.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) cube = iris.load_cube(path) qplt.plot(cube.coord("depth"), cube[0, :, 60, 80]) self.check_graphic() def test_1d_positive_down(self): - path = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) + path = 
_shared_utils.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) cube = iris.load_cube(path) qplt.plot(cube[0, :, 60, 80], cube.coord("depth")) self.check_graphic() def test_2d_positive_up(self): - path = tests.get_data_path(("NetCDF", "testing", "small_theta_colpex.nc")) + path = _shared_utils.get_data_path( + ("NetCDF", "testing", "small_theta_colpex.nc") + ) cube = iris.load_cube(path, "air_potential_temperature")[0, :, 42, :] qplt.pcolormesh(cube) self.check_graphic() def test_2d_positive_down(self): - path = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) + path = _shared_utils.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) cube = iris.load_cube(path)[0, :, 42, :] qplt.pcolormesh(cube) self.check_graphic() -# Caches _load_4d_testcube so subsequent calls are faster -def cache(fn, cache={}): - def inner(*args, **kwargs): - key = fn.__name__ - if key not in cache: - cache[key] = fn(*args, **kwargs) - return cache[key] - - return inner +@_shared_utils.skip_data +@pytest.fixture(scope="module") +def load_4d_testcube(): + """Load the realistic_4d() cube with specific modifications. - -@cache -@tests.skip_data -def _load_4d_testcube(): + Scoped to only load once - used many times so this is much faster. + """ # Load example 4d data (TZYX). test_cube = iris.tests.stock.realistic_4d() # Replace forecast_period coord with a multi-valued version. @@ -507,10 +496,15 @@ def _load_4d_testcube(): return test_cube -@cache -def _load_wind_no_bounds(): +@_shared_utils.skip_data +@pytest.fixture(scope="module") +def load_wind_no_bounds(): + """Load a cube representing wind data but with no coordinate bounds. + + Scoped to only load once - used many times so this is much faster. + """ # Load the COLPEX data => TZYX - path = tests.get_data_path(("PP", "COLPEX", "small_eastward_wind.pp")) + path = _shared_utils.get_data_path(("PP", "COLPEX", "small_eastward_wind.pp")) wind = iris.load_cube(path, "x_wind") # Remove bounds from all coords that have them. @@ -538,7 +532,7 @@ def _date_series(src_cube): return cube -@tests.skip_plot +@_shared_utils.skip_plot class SliceMixin: """Mixin class providing tests for each 2-dimensional permutation of axes. @@ -546,184 +540,123 @@ class SliceMixin: and self.results to be a dictionary containing the desired test results. """ + @pytest.fixture(autouse=True) + def _set_warnings_stance(self): + # Defining in a fixture enables inheritance by classes that expect a + # warning - setting self.warning_checker to the pytest.warns() context + # manager instead. 
+ self.warning_checker = nullcontext + def test_yx(self): cube = self.wind[0, 0, :, :] - self.draw_method(cube) + with self.warning_checker(UserWarning): + self.draw_method(cube) self.check_graphic() def test_zx(self): cube = self.wind[0, :, 0, :] - self.draw_method(cube) + with self.warning_checker(UserWarning): + self.draw_method(cube) self.check_graphic() def test_tx(self): cube = _time_series(self.wind[:, 0, 0, :]) - self.draw_method(cube) + with self.warning_checker(UserWarning): + self.draw_method(cube) self.check_graphic() def test_zy(self): cube = self.wind[0, :, :, 0] - self.draw_method(cube) + with self.warning_checker(UserWarning): + self.draw_method(cube) self.check_graphic() def test_ty(self): cube = _time_series(self.wind[:, 0, :, 0]) - self.draw_method(cube) + with self.warning_checker(UserWarning): + self.draw_method(cube) self.check_graphic() def test_tz(self): cube = _time_series(self.wind[:, :, 0, 0]) - self.draw_method(cube) + with self.warning_checker(UserWarning): + self.draw_method(cube) self.check_graphic() -@tests.skip_data -class TestContour(tests.GraphicsTest, SliceMixin): +@_shared_utils.skip_data +class TestContour(_shared_utils.GraphicsTest, SliceMixin): """Test the iris.plot.contour routine.""" - def setUp(self): - super().setUp() - self.wind = _load_4d_testcube() + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.wind = load_4d_testcube self.draw_method = iplt.contour -@tests.skip_data -class TestContourf(tests.GraphicsTest, SliceMixin): +@_shared_utils.skip_data +class TestContourf(_shared_utils.GraphicsTest, SliceMixin): """Test the iris.plot.contourf routine.""" - def setUp(self): - super().setUp() - self.wind = _load_4d_testcube() + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.wind = load_4d_testcube self.draw_method = iplt.contourf -@tests.skip_data -class TestPcolor(tests.GraphicsTest, SliceMixin): +@_shared_utils.skip_data +class TestPcolor(_shared_utils.GraphicsTest, SliceMixin): """Test the iris.plot.pcolor routine.""" - def setUp(self): - super().setUp() - self.wind = _load_4d_testcube() + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.wind = load_4d_testcube self.draw_method = iplt.pcolor -@tests.skip_data -class TestPcolormesh(tests.GraphicsTest, SliceMixin): +@_shared_utils.skip_data +class TestPcolormesh(_shared_utils.GraphicsTest, SliceMixin): """Test the iris.plot.pcolormesh routine.""" - def setUp(self): - super().setUp() - self.wind = _load_4d_testcube() + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.wind = load_4d_testcube self.draw_method = iplt.pcolormesh -def check_warnings(method): - """Decorator that adds a catch_warnings and filter to assert - the method being decorated issues a UserWarning. - - """ - - @wraps(method) - def decorated_method(self, *args, **kwargs): - # Force reset of iris.coords warnings registry to avoid suppression of - # repeated warnings. warnings.resetwarnings() does not do this. - if hasattr(coords, "__warningregistry__"): - coords.__warningregistry__.clear() - - # Check that method raises warning. 
- with warnings.catch_warnings(): - warnings.simplefilter("error") - with self.assertRaises(UserWarning): - return method(self, *args, **kwargs) - - return decorated_method - +class SliceWarningsMixin(SliceMixin): + @pytest.fixture(autouse=True) + def _set_warnings_stance(self): + self.warning_checker = pytest.warns -def ignore_warnings(method): - """Decorator that adds a catch_warnings and filter to suppress - any warnings issues by the method being decorated. - - """ - - @wraps(method) - def decorated_method(self, *args, **kwargs): - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - return method(self, *args, **kwargs) - - return decorated_method - - -class CheckForWarningsMetaclass(type): - """Metaclass that adds a further test for each base class test - that checks that each test raises a UserWarning. Each base - class test is then overridden to ignore warnings in order to - check the underlying functionality. - - """ - def __new__(cls, name, bases, local): - def add_decorated_methods(attr_dict, target_dict, decorator): - for key, value in attr_dict.items(): - if isinstance(value, types.FunctionType) and key.startswith("test"): - new_key = "_".join((key, decorator.__name__)) - if new_key not in target_dict: - wrapped = decorator(value) - wrapped.__name__ = new_key - target_dict[new_key] = wrapped - else: - raise RuntimeError( - "A attribute called {!r} already exists.".format(new_key) - ) - - def override_with_decorated_methods(attr_dict, target_dict, decorator): - for key, value in attr_dict.items(): - if isinstance(value, types.FunctionType) and key.startswith("test"): - target_dict[key] = decorator(value) - - # Add decorated versions of base methods - # to check for warnings. - for base in bases: - add_decorated_methods(base.__dict__, local, check_warnings) - - # Override base methods to ignore warnings. - for base in bases: - override_with_decorated_methods(base.__dict__, local, ignore_warnings) - - return type.__new__(cls, name, bases, local) - - -@tests.skip_data -class TestPcolorNoBounds( - tests.GraphicsTest, SliceMixin, metaclass=CheckForWarningsMetaclass -): +@_shared_utils.skip_data +class TestPcolorNoBounds(_shared_utils.GraphicsTest, SliceWarningsMixin): """Test the iris.plot.pcolor routine on a cube with coordinates that have no bounds. """ - def setUp(self): - super().setUp() - self.wind = _load_wind_no_bounds() + @pytest.fixture(autouse=True) + def _setup(self, load_wind_no_bounds): + self.wind = load_wind_no_bounds self.draw_method = iplt.pcolor -@tests.skip_data -class TestPcolormeshNoBounds( - tests.GraphicsTest, SliceMixin, metaclass=CheckForWarningsMetaclass -): +@_shared_utils.skip_data +class TestPcolormeshNoBounds(_shared_utils.GraphicsTest, SliceWarningsMixin): """Test the iris.plot.pcolormesh routine on a cube with coordinates that have no bounds. """ - def setUp(self): - super().setUp() - self.wind = _load_wind_no_bounds() + @pytest.fixture(autouse=True) + def _setup(self, load_wind_no_bounds): + self.wind = load_wind_no_bounds self.draw_method = iplt.pcolormesh -@tests.skip_plot +@_shared_utils.skip_plot class Slice1dMixin: """Mixin class providing tests for each 1-dimensional permutation of axes. 
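
# Illustrative sketch (editor's addition, not part of the patch): the
# warning-stance fixture pattern that SliceMixin / SliceWarningsMixin use
# above, shown with a hypothetical draw() helper so it runs without iris.
# The base mixin defaults self.warning_checker to contextlib.nullcontext;
# a subclass overrides the same autouse fixture with pytest.warns, so every
# inherited test body then asserts that a UserWarning is actually raised.
from contextlib import nullcontext
import warnings

import pytest


def draw(data, bounded=True):
    # Hypothetical stand-in for iplt.pcolor: warns when bounds are missing.
    if not bounded:
        warnings.warn("Coordinate has no bounds; guessing them.", UserWarning)
    return data


class DrawMixin:
    @pytest.fixture(autouse=True)
    def _set_warnings_stance(self):
        # Default stance: no warning expected, so wrap calls in a no-op context.
        self.warning_checker = nullcontext

    def test_draw(self):
        with self.warning_checker(UserWarning):
            draw([1, 2, 3], bounded=self.bounded)


class TestBounded(DrawMixin):
    bounded = True


class TestNoBounds(DrawMixin):
    # Overriding the autouse fixture flips the stance for all inherited tests.
    bounded = False

    @pytest.fixture(autouse=True)
    def _set_warnings_stance(self):
        self.warning_checker = pytest.warns
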
@@ -760,46 +693,26 @@ def test_t_dates(self): self.check_graphic() -@tests.skip_data -class TestPlot(tests.GraphicsTest, Slice1dMixin): +@_shared_utils.skip_data +class TestPlot(_shared_utils.GraphicsTest, Slice1dMixin): """Test the iris.plot.plot routine.""" - def setUp(self): - super().setUp() - self.wind = _load_4d_testcube() + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.wind = load_4d_testcube self.draw_method = iplt.plot -@tests.skip_data -class TestQuickplotPlot(tests.GraphicsTest, Slice1dMixin): +@_shared_utils.skip_data +class TestQuickplotPlot(_shared_utils.GraphicsTest, Slice1dMixin): """Test the iris.quickplot.plot routine.""" - def setUp(self): - super().setUp() - self.wind = _load_4d_testcube() + @pytest.fixture(autouse=True) + def _setup(self, load_4d_testcube): + self.wind = load_4d_testcube self.draw_method = qplt.plot -_load_cube_once_cache: dict[tuple[str, str], iris.cube.Cube] = {} - - -def load_cube_once(filename, constraint): - """Same syntax as load_cube, but will only load a file once. - - Then cache the answer in a dictionary. - - """ - global _load_cube_once_cache - key = (filename, str(constraint)) - cube = _load_cube_once_cache.get(key, None) - - if cube is None: - cube = iris.load_cube(filename, constraint) - _load_cube_once_cache[key] = cube - - return cube - - class LambdaStr: """Provides a callable function which has a sensible __repr__.""" @@ -814,17 +727,23 @@ def __repr__(self): return self.repr -@tests.skip_data -@tests.skip_plot -class TestPlotCoordinatesGiven(tests.GraphicsTest): - def setUp(self): - super().setUp() - filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) - self.cube = load_cube_once(filename, "air_potential_temperature") - if self.cube.coord_dims("time") != (0,): +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestPlotCoordinatesGiven(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True, scope="class") + def _get_cube(self): + # Class-scoped to avoid wastefully reloading the same Cube repeatedly. 
+ filename = _shared_utils.get_data_path( + ("PP", "COLPEX", "theta_and_orog_subset.pp") + ) + cube = iris.load_cube(filename, "air_potential_temperature") + if cube.coord_dims("time") != (0,): # A quick fix for data which has changed since we support time-varying orography - self.cube.transpose((1, 0, 2, 3)) + cube.transpose((1, 0, 2, 3)) + self.__class__.cube = cube + @pytest.fixture(autouse=True) + def _setup(self): self.draw_module = iris.plot self.contourf = LambdaStr( "iris.plot.contourf", @@ -926,26 +845,35 @@ def test_y(self): def test_badcoords(self): cube = self.cube[0, 0, :, :] draw_fn = getattr(self.draw_module, "contourf") - self.assertRaises( + pytest.raises( ValueError, draw_fn, cube, coords=["grid_longitude", "grid_longitude"], + match="don't span the 2 data dimensions", ) - self.assertRaises( + pytest.raises( ValueError, draw_fn, cube, coords=["grid_longitude", "grid_longitude", "grid_latitude"], + match="should have the same length", ) - self.assertRaises( + pytest.raises( iris.exceptions.CoordinateNotFoundError, draw_fn, cube, coords=["grid_longitude", "wibble"], + match="but found none", + ) + pytest.raises( + ValueError, + draw_fn, + cube, + coords=[], + match="should have the same length", ) - self.assertRaises(ValueError, draw_fn, cube, coords=[]) - self.assertRaises( + pytest.raises( ValueError, draw_fn, cube, @@ -953,8 +881,9 @@ def test_badcoords(self): cube.coord("grid_longitude"), cube.coord("grid_longitude"), ], + match="don't span the 2 data dimensions", ) - self.assertRaises( + pytest.raises( ValueError, draw_fn, cube, @@ -963,6 +892,7 @@ def test_badcoords(self): cube.coord("grid_longitude"), cube.coord("grid_longitude"), ], + match="should have the same length", ) def test_non_cube_coordinate(self): @@ -977,21 +907,21 @@ def test_non_cube_coordinate(self): self.draw("contourf", cube, coords=["grid_latitude", x]) -@tests.skip_data -@tests.skip_plot -class TestPlotHist(tests.GraphicsTest): +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestPlotHist(_shared_utils.GraphicsTest): def test_cube(self): cube = simple_cube()[0] iplt.hist(cube, bins=np.linspace(287.7, 288.2, 11)) self.check_graphic() -@tests.skip_data -@tests.skip_plot -class TestPlotDimAndAuxCoordsKwarg(tests.GraphicsTest): - def setUp(self): - super().setUp() - filename = tests.get_data_path( +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestPlotDimAndAuxCoordsKwarg(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): + filename = _shared_utils.get_data_path( ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") ) self.cube = iris.load_cube(filename) @@ -1033,8 +963,8 @@ def test_yx_order(self): self.check_graphic() -@tests.skip_plot -class TestSymbols(tests.GraphicsTest): +@_shared_utils.skip_plot +class TestSymbols(_shared_utils.GraphicsTest): def test_cloud_cover(self): iplt.symbols( list(range(10)), @@ -1046,10 +976,11 @@ def test_cloud_cover(self): self.check_graphic() -@tests.skip_plot -class TestPlottingExceptions(tests.IrisTest): - def setUp(self): - self.bounded_cube = tests.stock.lat_lon_cube() +@_shared_utils.skip_plot +class TestPlottingExceptions: + @pytest.fixture(autouse=True) + def _setup(self): + self.bounded_cube = iris.tests.stock.lat_lon_cube() self.bounded_cube.coord("latitude").guess_bounds() self.bounded_cube.coord("longitude").guess_bounds() @@ -1064,7 +995,7 @@ def test_boundmode_multidim(self): ), [0, 1], ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Could not get XY grid from 
bounds"): iplt.pcolormesh(cube, coords=["longitude", "latitude"]) def test_boundmode_4bounds(self): @@ -1077,7 +1008,7 @@ def test_boundmode_4bounds(self): ).transpose() cube.remove_coord("latitude") cube.add_aux_coord(lat, 0) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Could not get XY grid from bounds."): iplt.pcolormesh(cube, coords=["longitude", "latitude"]) def test_different_coord_systems(self): @@ -1086,15 +1017,15 @@ def test_different_coord_systems(self): lon = cube.coord("longitude") lat.coord_system = iris.coord_systems.GeogCS(7000000) lon.coord_system = iris.coord_systems.GeogCS(7000001) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="must have equal coordinate systems"): iplt.pcolormesh(cube, coords=["longitude", "latitude"]) -@tests.skip_data -@tests.skip_plot -class TestPlotOtherCoordSystems(tests.GraphicsTest): +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestPlotOtherCoordSystems(_shared_utils.GraphicsTest): def test_plot_tmerc(self): - filename = tests.get_data_path( + filename = _shared_utils.get_data_path( ("NetCDF", "transverse_mercator", "tmean_1910_1910.nc") ) self.cube = iris.load_cube(filename) @@ -1103,10 +1034,10 @@ def test_plot_tmerc(self): self.check_graphic() -@tests.skip_plot -class TestPlotCitation(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_plot +class TestPlotCitation(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.figure = plt.figure() self.axes = self.figure.gca() self.text = ( @@ -1126,7 +1057,3 @@ def test_figure(self): def test_axes(self): iplt.citation(self.text, axes=self.axes) self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/test_pp_cf.py b/lib/iris/tests/test_pp_cf.py index 8b0af5a5c3..538ce7c385 100644 --- a/lib/iris/tests/test_pp_cf.py +++ b/lib/iris/tests/test_pp_cf.py @@ -3,16 +3,16 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip - import os import tempfile +import pytest + import iris import iris.coords from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.pp import STASH +from iris.tests import _shared_utils import iris.util @@ -62,10 +62,14 @@ def callback_aaxzc_n10r13xy_b_pp(cube, field, filename): cube.add_aux_coord(height_coord) -@tests.skip_data -class TestAll(tests.IrisTest, tests.PPTest): +@_shared_utils.skip_data +class TestAll: _ref_dir = ("usecases", "pp_to_cf_conversion") + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + def _test_file(self, name): """Main test routine that is called for each of the files listed below.""" pp_path = self._src_pp_path(name) @@ -80,7 +84,9 @@ def _test_file(self, name): else: fname_name = name - self.assertCML(cubes, self._ref_dir + ("from_pp", fname_name + ".cml")) + _shared_utils.assert_CML( + self.request, cubes, self._ref_dir + ("from_pp", fname_name + ".cml") + ) # 2) Save the Cube and check the netCDF nc_filenames = [] @@ -99,7 +105,8 @@ def _test_file(self, name): ) # Check the netCDF file against CDL expected output. 
- self.assertCDL( + _shared_utils.assert_CDL( + self.request, file_nc, self._ref_dir + ("to_netcdf", "%s_%d.cdl" % (fname_name, index)), ) @@ -109,7 +116,8 @@ def _test_file(self, name): for index, nc_filename in enumerate(nc_filenames): # Read netCDF to Cube. cube = iris.load_cube(nc_filename) - self.assertCML( + _shared_utils.assert_CML( + self.request, cube, self._ref_dir + ("from_netcdf", "%s_%d.cml" % (fname_name, index)), ) @@ -122,15 +130,15 @@ def _test_file(self, name): self._test_pp_save(cubes, name) def _src_pp_path(self, name): - return tests.get_data_path(("PP", "cf_processing", name)) + return _shared_utils.get_data_path(("PP", "cf_processing", name)) def _test_pp_save(self, cubes, name): # If there's no existing reference file then make it from the *source* data - reference_txt_path = tests.get_result_path( + reference_txt_path = _shared_utils.get_result_path( self._ref_dir + ("to_pp", name + ".txt") ) reference_pp_path = self._src_pp_path(name) - with self.cube_save_test( + with _shared_utils.pp_cube_save_test( reference_txt_path, reference_pp_path=reference_pp_path ) as temp_pp_path: iris.save(cubes, temp_pp_path) @@ -187,7 +195,3 @@ def attach_tests(): attach_tests() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index 3a8e988a4d..72ed851275 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -3,58 +3,54 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip - from copy import deepcopy import os from types import GeneratorType -import unittest -from unittest import mock import cftime from numpy.testing import assert_array_equal +import pytest -import iris.fileformats import iris.fileformats.pp as pp -import iris.util +from iris.tests import _shared_utils -@tests.skip_data -class TestPPCopy(tests.IrisTest): - def setUp(self): - self.filename = tests.get_data_path(("PP", "aPPglob1", "global.pp")) +@_shared_utils.skip_data +class TestPPCopy: + @pytest.fixture(autouse=True) + def _setup(self): + self.filename = _shared_utils.get_data_path(("PP", "aPPglob1", "global.pp")) def test_copy_field_deferred(self): field = next(pp.load(self.filename)) clone = field.copy() - self.assertEqual(field, clone) + assert field == clone clone.lbyr = 666 - self.assertNotEqual(field, clone) + assert field != clone def test_deepcopy_field_deferred(self): field = next(pp.load(self.filename)) clone = deepcopy(field) - self.assertEqual(field, clone) + assert field == clone clone.lbyr = 666 - self.assertNotEqual(field, clone) + assert field != clone def test_copy_field_non_deferred(self): field = next(pp.load(self.filename, True)) clone = field.copy() - self.assertEqual(field, clone) + assert field == clone clone.data[0][0] = 666 - self.assertNotEqual(field, clone) + assert field != clone def test_deepcopy_field_non_deferred(self): field = next(pp.load(self.filename, True)) clone = deepcopy(field) - self.assertEqual(field, clone) + assert field == clone clone.data[0][0] = 666 - self.assertNotEqual(field, clone) + assert field != clone -class IrisPPTest(tests.IrisTest): +class IrisPPTest: def check_pp(self, pp_fields, reference_filename): """Checks the given iterable of PPField objects matches the reference file, or creates the reference file if it doesn't exist. 
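
# Illustrative sketch (editor's addition, not part of the patch): the pattern
# used in these conversions of capturing pytest's built-in `request` fixture
# in an autouse setup, so that helper methods can later pass it to utilities
# such as _shared_utils.assert_CML / assert_CDL without every test declaring
# `request` itself. assert_against_reference below is a hypothetical stand-in.
import pytest


def assert_against_reference(request, value):
    # Hypothetical helper: labels the check with the requesting test's id.
    assert value is not None, f"no result produced by {request.node.name}"


class TestWithCapturedRequest:
    @pytest.fixture(autouse=True)
    def _setup(self, request):
        # Stash the fixture so plain methods can reach it, mirroring
        # `self.request = request` in the converted test classes.
        self.request = request
        self.result = {"cube": "loaded"}

    def _check(self):
        assert_against_reference(self.request, self.result)

    def test_round_trip(self):
        self._check()
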
@@ -68,11 +64,11 @@ def check_pp(self, pp_fields, reference_filename): pp_field.data test_string = str(pp_fields) - reference_path = tests.get_result_path(reference_filename) + reference_path = _shared_utils.get_result_path(reference_filename) if os.path.isfile(reference_path): with open(reference_path, "r") as reference_fh: reference = "".join(reference_fh.readlines()) - self._assert_str_same( + _shared_utils._assert_str_same( reference + "\n", test_string + "\n", reference_filename, @@ -83,48 +79,49 @@ def check_pp(self, pp_fields, reference_filename): reference_fh.writelines(test_string) -class TestPPHeaderDerived(tests.IrisTest): - def setUp(self): +class TestPPHeaderDerived: + @pytest.fixture(autouse=True) + def _setup(self): self.pp = pp.PPField2() self.pp.lbuser = (0, 1, 2, 3, 4, 5, 6) self.pp.lbtim = 11 self.pp.lbproc = 65539 def test_standard_access(self): - self.assertEqual(self.pp.lbtim, 11) + assert self.pp.lbtim == 11 def test_lbtim_access(self): - self.assertEqual(self.pp.lbtim[0], 1) - self.assertEqual(self.pp.lbtim.ic, 1) + assert self.pp.lbtim[0] == 1 + assert self.pp.lbtim.ic == 1 def test_lbtim_setter(self): self.pp.lbtim[4] = 4 self.pp.lbtim[0] = 4 - self.assertEqual(self.pp.lbtim[0], 4) - self.assertEqual(self.pp.lbtim.ic, 4) + assert self.pp.lbtim[0] == 4 + assert self.pp.lbtim.ic == 4 self.pp.lbtim.ib = 9 - self.assertEqual(self.pp.lbtim.ib, 9) - self.assertEqual(self.pp.lbtim[1], 9) + assert self.pp.lbtim.ib == 9 + assert self.pp.lbtim[1] == 9 def test_set_lbuser(self): self.pp.stash = "m02s12i003" - self.assertEqual(self.pp.stash, pp.STASH(2, 12, 3)) + assert self.pp.stash == pp.STASH(2, 12, 3) self.pp.lbuser[6] = 5 - self.assertEqual(self.pp.stash, pp.STASH(5, 12, 3)) + assert self.pp.stash == pp.STASH(5, 12, 3) self.pp.lbuser[3] = 4321 - self.assertEqual(self.pp.stash, pp.STASH(5, 4, 321)) + assert self.pp.stash == pp.STASH(5, 4, 321) def test_set_stash(self): self.pp.stash = "m02s12i003" - self.assertEqual(self.pp.stash, pp.STASH(2, 12, 3)) + assert self.pp.stash == pp.STASH(2, 12, 3) self.pp.stash = pp.STASH(3, 13, 4) - self.assertEqual(self.pp.stash, pp.STASH(3, 13, 4)) - self.assertEqual(self.pp.lbuser[3], self.pp.stash.lbuser3()) - self.assertEqual(self.pp.lbuser[6], self.pp.stash.lbuser6()) + assert self.pp.stash == pp.STASH(3, 13, 4) + assert self.pp.lbuser[3] == self.pp.stash.lbuser3() + assert self.pp.lbuser[6] == self.pp.stash.lbuser6() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.pp.stash = (4, 15, 5) def test_lbproc_bad_access(self): @@ -133,141 +130,144 @@ def test_lbproc_bad_access(self): except AttributeError: pass except Exception as err: - self.fail("Should return a better error: " + str(err)) + pytest.fail("Should return a better error: " + str(err)) -@tests.skip_data +@_shared_utils.skip_data class TestPPField_GlobalTemperature(IrisPPTest): - def setUp(self): - self.original_pp_filepath = tests.get_data_path(("PP", "aPPglob1", "global.pp")) + @pytest.fixture(autouse=True) + def _setup(self): + self.original_pp_filepath = _shared_utils.get_data_path( + ("PP", "aPPglob1", "global.pp") + ) self.r = list(pp.load(self.original_pp_filepath)) def test_full_file(self): self.check_pp(self.r[0:10], ("PP", "global_test.pp.txt")) def test_lbtim_access(self): - self.assertEqual(self.r[0].lbtim[0], 2) - self.assertEqual(self.r[0].lbtim.ic, 2) + assert self.r[0].lbtim[0] == 2 + assert self.r[0].lbtim.ic == 2 def test_t1_t2_access(self): field = self.r[0] calendar = "360_day" - self.assertEqual( - field.t1.timetuple(), - 
cftime.datetime(1994, 12, 1, 0, 0, calendar=calendar).timetuple(), + assert ( + field.t1.timetuple() + == cftime.datetime(1994, 12, 1, 0, 0, calendar=calendar).timetuple() ) - def test_save_single(self): - temp_filename = iris.util.create_temp_filename(".pp") + def test_save_single(self, tmp_path): + temp_filename = tmp_path / "foo.pp" with open(temp_filename, "wb") as temp_fh: self.r[0].save(temp_fh) - self.assertEqual( - self.file_checksum(temp_filename), - self.file_checksum(self.original_pp_filepath), - ) - os.remove(temp_filename) + assert _shared_utils.file_checksum( + temp_filename + ) == _shared_utils.file_checksum(self.original_pp_filepath) - def test_save_api(self): + def test_save_api(self, tmp_path): filepath = self.original_pp_filepath f = next(pp.load(filepath)) - temp_filename = iris.util.create_temp_filename(".pp") + temp_filename = tmp_path / "foo.pp" with open(temp_filename, "wb") as temp_fh: f.save(temp_fh) - self.assertEqual( - self.file_checksum(temp_filename), self.file_checksum(filepath) - ) + assert _shared_utils.file_checksum( + temp_filename + ) == _shared_utils.file_checksum(filepath) - os.remove(temp_filename) - -@tests.skip_data +@_shared_utils.skip_data class TestPackedPP(IrisPPTest): - def test_wgdos(self): - filepath = tests.get_data_path( + def test_wgdos(self, mocker, tmp_path): + filepath = _shared_utils.get_data_path( ("PP", "wgdos_packed", "nae.20100104-06_0001.pp") ) r = pp.load(filepath) # Check that the result is a generator and convert to a list so that we # can index and get the first one - self.assertEqual(type(r), GeneratorType) + assert isinstance(r, GeneratorType) r = list(r) self.check_pp(r, ("PP", "nae_unpacked.pp.txt")) # check that trying to save this field again raises an error # (we cannot currently write WGDOS packed fields without mo_pack) - temp_filename = iris.util.create_temp_filename(".pp") - with mock.patch("iris.fileformats.pp.mo_pack", None): - with self.assertRaises(NotImplementedError): - with open(temp_filename, "wb") as temp_fh: - r[0].save(temp_fh) - os.remove(temp_filename) - - @unittest.skipIf(pp.mo_pack is None, "Requires mo_pack.") - def test_wgdos_mo_pack(self): - filepath = tests.get_data_path( + temp_filename = tmp_path / "foo.pp" + mocker.patch("iris.fileformats.pp.mo_pack", None) + with pytest.raises(NotImplementedError): + with open(temp_filename, "wb") as temp_fh: + r[0].save(temp_fh) + + @pytest.mark.skipif(pp.mo_pack is None, reason="Requires mo_pack.") + def test_wgdos_mo_pack(self, tmp_path): + filepath = _shared_utils.get_data_path( ("PP", "wgdos_packed", "nae.20100104-06_0001.pp") ) orig_fields = pp.load(filepath) - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as fh: - for field in orig_fields: - field.save(fh) - saved_fields = pp.load(temp_filename) - for orig_field, saved_field in zip(orig_fields, saved_fields): - assert_array_equal(orig_field.data, saved_field.data) + temp_filename = tmp_path / "foo.pp" + with open(temp_filename, "wb") as fh: + for field in orig_fields: + field.save(fh) + saved_fields = pp.load(temp_filename) + for orig_field, saved_field in zip(orig_fields, saved_fields): + assert_array_equal(orig_field.data, saved_field.data) - def test_rle(self): - r = pp.load(tests.get_data_path(("PP", "ocean_rle", "ocean_rle.pp"))) + def test_rle(self, tmp_path): + r = pp.load(_shared_utils.get_data_path(("PP", "ocean_rle", "ocean_rle.pp"))) # Check that the result is a generator and convert to a list so that we # can index and get the first one - 
self.assertEqual(type(r), GeneratorType) + assert isinstance(r, GeneratorType) r = list(r) self.check_pp(r, ("PP", "rle_unpacked.pp.txt")) # check that trying to save this field again raises an error # (we cannot currently write RLE packed fields) - with self.temp_filename(".pp") as temp_filename: - with self.assertRaises(NotImplementedError): - with open(temp_filename, "wb") as temp_fh: - r[0].save(temp_fh) + temp_filename = tmp_path / "foo.pp" + with pytest.raises(NotImplementedError): + with open(temp_filename, "wb") as temp_fh: + r[0].save(temp_fh) -@tests.skip_data +@_shared_utils.skip_data class TestPPFile(IrisPPTest): def test_lots_of_extra_data(self): r = pp.load( - tests.get_data_path( + _shared_utils.get_data_path( ("PP", "cf_processing", "HadCM2_ts_SAT_ann_18602100.b.pp") ) ) r = list(r) - self.assertEqual(r[0].lbcode.ix, 13) - self.assertEqual(r[0].lbcode.iy, 23) - self.assertEqual(len(r[0].lbcode), 5) + assert r[0].lbcode.ix == 13 + assert r[0].lbcode.iy == 23 + assert len(r[0].lbcode) == 5 self.check_pp(r, ("PP", "extra_data_time_series.pp.txt")) -@tests.skip_data +@_shared_utils.skip_data class TestPPFileExtraXData(IrisPPTest): - def setUp(self): - self.original_pp_filepath = tests.get_data_path(("PP", "ukV1", "ukVpmslont.pp")) + @pytest.fixture(autouse=True) + def _setup(self): + self.original_pp_filepath = _shared_utils.get_data_path( + ("PP", "ukV1", "ukVpmslont.pp") + ) self.r = list(pp.load(self.original_pp_filepath))[0:5] def test_full_file(self): self.check_pp(self.r, ("PP", "extra_x_data.pp.txt")) - def test_save_single(self): - filepath = tests.get_data_path(("PP", "ukV1", "ukVpmslont_first_field.pp")) + def test_save_single(self, tmp_path): + filepath = _shared_utils.get_data_path( + ("PP", "ukV1", "ukVpmslont_first_field.pp") + ) f = next(pp.load(filepath)) - temp_filename = iris.util.create_temp_filename(".pp") + temp_filename = tmp_path / "foo.pp" with open(temp_filename, "wb") as temp_fh: f.save(temp_fh) @@ -275,36 +275,36 @@ def test_save_single(self): # force the data to be loaded (this was done for f when save was run) s.data - self._assert_str_same( + _shared_utils._assert_str_same( str(s) + "\n", str(f) + "\n", "", type_comparison_name="PP files" ) - self.assertEqual( - self.file_checksum(temp_filename), self.file_checksum(filepath) - ) - os.remove(temp_filename) + assert _shared_utils.file_checksum( + temp_filename + ) == _shared_utils.file_checksum(filepath) -@tests.skip_data +@_shared_utils.skip_data class TestPPFileWithExtraCharacterData(IrisPPTest): - def setUp(self): - self.original_pp_filepath = tests.get_data_path( + @pytest.fixture(autouse=True) + def _setup(self): + self.original_pp_filepath = _shared_utils.get_data_path( ("PP", "globClim1", "dec_subset.pp") ) self.r = pp.load(self.original_pp_filepath) self.r_loaded_data = pp.load(self.original_pp_filepath, read_data=True) # Check that the result is a generator and convert to a list so that we can index and get the first one - self.assertEqual(type(self.r), GeneratorType) + assert isinstance(self.r, GeneratorType) self.r = list(self.r) - self.assertEqual(type(self.r_loaded_data), GeneratorType) + assert isinstance(self.r_loaded_data, GeneratorType) self.r_loaded_data = list(self.r_loaded_data) def test_extra_field_title(self): - self.assertEqual( - self.r[0].field_title, - "AJHQA Time mean !C Atmos u compnt of wind after timestep at 9.998 metres !C 01/12/2007 00:00 -> 01/01/2008 00:00", + assert ( + self.r[0].field_title + == "AJHQA Time mean !C Atmos u compnt of wind after timestep at 9.998 
metres !C 01/12/2007 00:00 -> 01/01/2008 00:00" ) def test_full_file(self): @@ -314,11 +314,13 @@ def test_full_file(self): ("PP", "extra_char_data.w_data_loaded.pp.txt"), ) - def test_save_single(self): - filepath = tests.get_data_path(("PP", "model_comp", "dec_first_field.pp")) + def test_save_single(self, tmp_path): + filepath = _shared_utils.get_data_path( + ("PP", "model_comp", "dec_first_field.pp") + ) f = next(pp.load(filepath)) - temp_filename = iris.util.create_temp_filename(".pp") + temp_filename = tmp_path / "foo.pp" with open(temp_filename, "wb") as temp_fh: f.save(temp_fh) @@ -326,60 +328,59 @@ def test_save_single(self): # force the data to be loaded (this was done for f when save was run) s.data - self._assert_str_same( + _shared_utils._assert_str_same( str(s) + "\n", str(f) + "\n", "", type_comparison_name="PP files" ) - self.assertEqual( - self.file_checksum(temp_filename), self.file_checksum(filepath) - ) - os.remove(temp_filename) + assert _shared_utils.file_checksum( + temp_filename + ) == _shared_utils.file_checksum(filepath) -class TestSplittableInt(tests.IrisTest): +class TestSplittableInt: def test_3(self): t = pp.SplittableInt(3) - self.assertEqual(t[0], 3) + assert t[0] == 3 def test_grow_str_list(self): t = pp.SplittableInt(3) t[1] = 3 - self.assertEqual(t[1], 3) + assert t[1] == 3 t[5] = 4 - self.assertEqual(t[5], 4) + assert t[5] == 4 - self.assertEqual(int(t), 400033) + assert int(t) == 400033 - self.assertEqual(t, 400033) - self.assertNotEqual(t, 33) + assert t == 400033 + assert t != 33 - self.assertTrue(t >= 400033) - self.assertFalse(t >= 400034) + assert t >= 400033 + assert not t >= 400034 - self.assertTrue(t <= 400033) - self.assertFalse(t <= 400032) + assert t <= 400033 + assert not t <= 400032 - self.assertTrue(t > 400032) - self.assertFalse(t > 400034) + assert t > 400032 + assert not t > 400034 - self.assertTrue(t < 400034) - self.assertFalse(t < 400032) + assert t < 400034 + assert not t < 400032 def test_name_mapping(self): t = pp.SplittableInt(33214, {"ones": 0, "tens": 1, "hundreds": 2}) - self.assertEqual(t.ones, 4) - self.assertEqual(t.tens, 1) - self.assertEqual(t.hundreds, 2) + assert t.ones == 4 + assert t.tens == 1 + assert t.hundreds == 2 t.ones = 9 t.tens = 4 t.hundreds = 0 - self.assertEqual(t.ones, 9) - self.assertEqual(t.tens, 4) - self.assertEqual(t.hundreds, 0) + assert t.ones == 9 + assert t.tens == 4 + assert t.hundreds == 0 def test_name_mapping_multi_index(self): t = pp.SplittableInt( @@ -390,69 +391,66 @@ def test_name_mapping_multi_index(self): "backwards": slice(None, None, -1), }, ) - self.assertEqual(t.weird_number, 324) - self.assertEqual(t.last_few, 13) - self.assertRaises(ValueError, setattr, t, "backwards", 1) - self.assertRaises(ValueError, setattr, t, "last_few", 1) - self.assertEqual(t.backwards, 41233) - self.assertEqual(t, 33214) + assert t.weird_number == 324 + assert t.last_few == 13 + pytest.raises(ValueError, setattr, t, "backwards", 1) + pytest.raises(ValueError, setattr, t, "last_few", 1) + assert t.backwards == 41233 + assert t == 33214 t.weird_number = 99 # notice that this will zero the 5th number - self.assertEqual(t, 3919) + assert t == 3919 t.weird_number = 7899 - self.assertEqual(t, 7083919) + assert t == 7083919 t.foo = 1 t = pp.SplittableInt(33214, {"ix": slice(None, 2), "iy": slice(2, 4)}) - self.assertEqual(t.ix, 14) - self.assertEqual(t.iy, 32) + assert t.ix == 14 + assert t.iy == 32 t.ix = 21 - self.assertEqual(t, 33221) + assert t == 33221 t = pp.SplittableInt(33214, {"ix": slice(-1, 2)}) - 
self.assertEqual(t.ix, 0) + assert t.ix == 0 t = pp.SplittableInt(4, {"ix": slice(None, 2), "iy": slice(2, 4)}) - self.assertEqual(t.ix, 4) - self.assertEqual(t.iy, 0) + assert t.ix == 4 + assert t.iy == 0 def test_33214(self): t = pp.SplittableInt(33214) - self.assertEqual(t[4], 3) - self.assertEqual(t[3], 3) - self.assertEqual(t[2], 2) - self.assertEqual(t[1], 1) - self.assertEqual(t[0], 4) + assert t[4] == 3 + assert t[3] == 3 + assert t[2] == 2 + assert t[1] == 1 + assert t[0] == 4 # The rest should be zero for i in range(5, 100): - self.assertEqual(t[i], 0) + assert t[i] == 0 def test_negative_number(self): - self.assertRaises(ValueError, pp.SplittableInt, -5) - try: + with pytest.raises( + ValueError, + match="Negative numbers not supported with splittable integers object", + ): _ = pp.SplittableInt(-5) - except ValueError as err: - self.assertEqual( - str(err), - "Negative numbers not supported with splittable integers object", - ) -class TestSplittableIntEquality(tests.IrisTest): +class TestSplittableIntEquality: def test_not_implemented(self): class Terry: pass sin = pp.SplittableInt(0) - self.assertIs(sin.__eq__(Terry()), NotImplemented) - self.assertIs(sin.__ne__(Terry()), NotImplemented) + assert sin.__eq__(Terry()) is NotImplemented + assert sin.__ne__(Terry()) is NotImplemented -class TestPPDataProxyEquality(tests.IrisTest): +class TestPPDataProxyEquality: def test_not_implemented(self): class Terry: pass @@ -467,19 +465,15 @@ class Terry: "beans", "eggs", ) - self.assertIs(pox.__eq__(Terry()), NotImplemented) - self.assertIs(pox.__ne__(Terry()), NotImplemented) + assert pox.__eq__(Terry()) is NotImplemented + assert pox.__ne__(Terry()) is NotImplemented -class TestPPFieldEquality(tests.IrisTest): +class TestPPFieldEquality: def test_not_implemented(self): class Terry: pass pox = pp.PPField3() - self.assertIs(pox.__eq__(Terry()), NotImplemented) - self.assertIs(pox.__ne__(Terry()), NotImplemented) - - -if __name__ == "__main__": - tests.main() + assert pox.__eq__(Terry()) is NotImplemented + assert pox.__ne__(Terry()) is NotImplemented diff --git a/lib/iris/tests/test_pp_stash.py b/lib/iris/tests/test_pp_stash.py index e5b6953bf3..8123336c03 100644 --- a/lib/iris/tests/test_pp_stash.py +++ b/lib/iris/tests/test_pp_stash.py @@ -3,104 +3,98 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip +import pytest import iris import iris.fileformats.pp import iris.io +from iris.tests import _shared_utils import iris.tests.stock import iris.util -class TestPPStash(tests.IrisTest): - @tests.skip_data +class TestPPStash: + @_shared_utils.skip_data def test_cube_attributes(self): - cube = tests.stock.simple_pp() - self.assertEqual("m01s16i203", cube.attributes["STASH"]) - self.assertNotEqual("m01s16i999", cube.attributes["STASH"]) - self.assertEqual(cube.attributes["STASH"], "m01s16i203") - self.assertNotEqual(cube.attributes["STASH"], "m01s16i999") - - @tests.skip_data + cube = iris.tests.stock.simple_pp() + assert "m01s16i203" == cube.attributes["STASH"] + assert "m01s16i999" != cube.attributes["STASH"] + # Also exercise iris.fileformats.pp.STASH eq and ne methods. 
+ assert cube.attributes["STASH"] == "m01s16i203" + assert cube.attributes["STASH"] != "m01s16i999" + + @_shared_utils.skip_data def test_ppfield(self): - data_path = tests.get_data_path(("PP", "simple_pp", "global.pp")) + data_path = _shared_utils.get_data_path(("PP", "simple_pp", "global.pp")) pps = iris.fileformats.pp.load(data_path) for pp in pps: - self.assertEqual("m01s16i203", pp.stash) - self.assertNotEqual("m01s16i999", pp.stash) - self.assertEqual(pp.stash, "m01s16i203") - self.assertNotEqual(pp.stash, "m01s16i999") + assert "m01s16i203" == pp.stash + assert "m01s16i999" != pp.stash + # Also exercise iris.fileformats.pp.STASH eq and ne methods. + assert pp.stash == "m01s16i203" + assert pp.stash != "m01s16i999" def test_stash_against_stash(self): - self.assertEqual( - iris.fileformats.pp.STASH(1, 2, 3), - iris.fileformats.pp.STASH(1, 2, 3), - ) - self.assertNotEqual( - iris.fileformats.pp.STASH(1, 2, 3), - iris.fileformats.pp.STASH(2, 3, 4), - ) + assert iris.fileformats.pp.STASH(1, 2, 3) == iris.fileformats.pp.STASH(1, 2, 3) + assert iris.fileformats.pp.STASH(1, 2, 3) != iris.fileformats.pp.STASH(2, 3, 4) def test_stash_against_str(self): - self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s02i003") - self.assertEqual("m01s02i003", iris.fileformats.pp.STASH(1, 2, 3)) - self.assertNotEqual(iris.fileformats.pp.STASH(1, 2, 3), "m02s03i004") - self.assertNotEqual("m02s03i004", iris.fileformats.pp.STASH(1, 2, 3)) + # Also exercise iris.fileformats.pp.STASH eq and ne methods. + assert iris.fileformats.pp.STASH(1, 2, 3) == "m01s02i003" + assert "m01s02i003" == iris.fileformats.pp.STASH(1, 2, 3) + assert iris.fileformats.pp.STASH(1, 2, 3) != "m02s03i004" + assert "m02s03i004" != iris.fileformats.pp.STASH(1, 2, 3) def test_irregular_stash_str(self): - self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s02i0000000003") - self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s02i3") - self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m01s2i3") - self.assertEqual(iris.fileformats.pp.STASH(1, 2, 3), "m1s2i3") - - self.assertEqual("m01s02i0000000003", iris.fileformats.pp.STASH(1, 2, 3)) - self.assertEqual("m01s02i3", iris.fileformats.pp.STASH(1, 2, 3)) - self.assertEqual("m01s2i3", iris.fileformats.pp.STASH(1, 2, 3)) - self.assertEqual("m1s2i3", iris.fileformats.pp.STASH(1, 2, 3)) - - self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m01s02i0000000003") - self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m01s02i3") - self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m01s2i3") - self.assertNotEqual(iris.fileformats.pp.STASH(2, 3, 4), "m1s2i3") - - self.assertNotEqual("m01s02i0000000003", iris.fileformats.pp.STASH(2, 3, 4)) - self.assertNotEqual("m01s02i3", iris.fileformats.pp.STASH(2, 3, 4)) - self.assertNotEqual("m01s2i3", iris.fileformats.pp.STASH(2, 3, 4)) - self.assertNotEqual("m1s2i3", iris.fileformats.pp.STASH(2, 3, 4)) - - self.assertEqual(iris.fileformats.pp.STASH.from_msi("M01s02i003"), "m01s02i003") - self.assertEqual("m01s02i003", iris.fileformats.pp.STASH.from_msi("M01s02i003")) + # Also exercise iris.fileformats.pp.STASH eq and ne methods. 
+ assert iris.fileformats.pp.STASH(1, 2, 3) == "m01s02i0000000003" + assert iris.fileformats.pp.STASH(1, 2, 3) == "m01s02i3" + assert iris.fileformats.pp.STASH(1, 2, 3) == "m01s2i3" + assert iris.fileformats.pp.STASH(1, 2, 3) == "m1s2i3" + + assert "m01s02i0000000003" == iris.fileformats.pp.STASH(1, 2, 3) + assert "m01s02i3" == iris.fileformats.pp.STASH(1, 2, 3) + assert "m01s2i3" == iris.fileformats.pp.STASH(1, 2, 3) + assert "m1s2i3" == iris.fileformats.pp.STASH(1, 2, 3) + + assert iris.fileformats.pp.STASH(2, 3, 4) != "m01s02i0000000003" + assert iris.fileformats.pp.STASH(2, 3, 4) != "m01s02i3" + assert iris.fileformats.pp.STASH(2, 3, 4) != "m01s2i3" + assert iris.fileformats.pp.STASH(2, 3, 4) != "m1s2i3" + + assert "m01s02i0000000003" != iris.fileformats.pp.STASH(2, 3, 4) + assert "m01s02i3" != iris.fileformats.pp.STASH(2, 3, 4) + assert "m01s2i3" != iris.fileformats.pp.STASH(2, 3, 4) + assert "m1s2i3" != iris.fileformats.pp.STASH(2, 3, 4) + + assert iris.fileformats.pp.STASH.from_msi("M01s02i003") == "m01s02i003" + assert "m01s02i003" == iris.fileformats.pp.STASH.from_msi("M01s02i003") def test_illegal_stash_str_range(self): - self.assertEqual(iris.fileformats.pp.STASH(0, 2, 3), "m??s02i003") - self.assertNotEqual(iris.fileformats.pp.STASH(0, 2, 3), "m01s02i003") + # Also exercise iris.fileformats.pp.STASH eq and ne methods. + assert iris.fileformats.pp.STASH(0, 2, 3) == "m??s02i003" + assert iris.fileformats.pp.STASH(0, 2, 3) != "m01s02i003" - self.assertEqual("m??s02i003", iris.fileformats.pp.STASH(0, 2, 3)) - self.assertNotEqual("m01s02i003", iris.fileformats.pp.STASH(0, 2, 3)) + assert "m??s02i003" == iris.fileformats.pp.STASH(0, 2, 3) + assert "m01s02i003" != iris.fileformats.pp.STASH(0, 2, 3) - self.assertEqual(iris.fileformats.pp.STASH(0, 2, 3), "m??s02i003") - self.assertEqual(iris.fileformats.pp.STASH(0, 2, 3), "m00s02i003") - self.assertEqual("m??s02i003", iris.fileformats.pp.STASH(0, 2, 3)) - self.assertEqual("m00s02i003", iris.fileformats.pp.STASH(0, 2, 3)) + assert iris.fileformats.pp.STASH(0, 2, 3) == "m??s02i003" + assert iris.fileformats.pp.STASH(0, 2, 3) == "m00s02i003" + assert "m??s02i003" == iris.fileformats.pp.STASH(0, 2, 3) + assert "m00s02i003" == iris.fileformats.pp.STASH(0, 2, 3) - self.assertEqual(iris.fileformats.pp.STASH(100, 2, 3), "m??s02i003") - self.assertEqual(iris.fileformats.pp.STASH(100, 2, 3), "m100s02i003") - self.assertEqual("m??s02i003", iris.fileformats.pp.STASH(100, 2, 3)) - self.assertEqual("m100s02i003", iris.fileformats.pp.STASH(100, 2, 3)) + assert iris.fileformats.pp.STASH(100, 2, 3) == "m??s02i003" + assert iris.fileformats.pp.STASH(100, 2, 3) == "m100s02i003" + assert "m??s02i003" == iris.fileformats.pp.STASH(100, 2, 3) + assert "m100s02i003" == iris.fileformats.pp.STASH(100, 2, 3) def test_illegal_stash_stash_range(self): - self.assertEqual( - iris.fileformats.pp.STASH(0, 2, 3), - iris.fileformats.pp.STASH(0, 2, 3), - ) - self.assertEqual( - iris.fileformats.pp.STASH(100, 2, 3), - iris.fileformats.pp.STASH(100, 2, 3), + assert iris.fileformats.pp.STASH(0, 2, 3) == iris.fileformats.pp.STASH(0, 2, 3) + assert iris.fileformats.pp.STASH(100, 2, 3) == iris.fileformats.pp.STASH( + 100, 2, 3 ) - self.assertEqual( - iris.fileformats.pp.STASH(100, 2, 3), - iris.fileformats.pp.STASH(999, 2, 3), + assert iris.fileformats.pp.STASH(100, 2, 3) == iris.fileformats.pp.STASH( + 999, 2, 3 ) def test_illegal_stash_format(self): @@ -112,9 +106,9 @@ def test_illegal_stash_format(self): for test_value, reference in test_values: msg = "Expected STASH code .* 
{!r}".format(test_value) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): test_value == iris.fileformats.pp.STASH(*reference) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): iris.fileformats.pp.STASH(*reference) == test_value def test_illegal_stash_type(self): @@ -125,16 +119,12 @@ def test_illegal_stash_type(self): for test_value, reference in test_values: msg = "Expected STASH code .* {!r}".format(test_value) - with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): iris.fileformats.pp.STASH.from_msi(test_value) == reference - with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): reference == iris.fileformats.pp.STASH.from_msi(test_value) def test_stash_lbuser(self): stash = iris.fileformats.pp.STASH(2, 32, 456) - self.assertEqual(stash.lbuser6(), 2) - self.assertEqual(stash.lbuser3(), 32456) - - -if __name__ == "__main__": - tests.main() + assert stash.lbuser6() == 2 + assert stash.lbuser3() == 32456 diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py index a61703761f..88eca67d6d 100644 --- a/lib/iris/tests/test_pp_to_cube.py +++ b/lib/iris/tests/test_pp_to_cube.py @@ -3,26 +3,28 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip +from uuid import uuid4 -import os +import pytest import iris import iris.fileformats.pp import iris.fileformats.pp_load_rules import iris.fileformats.rules import iris.io +from iris.tests import _shared_utils import iris.tests.stock import iris.util -@tests.skip_data -class TestPPLoadCustom(tests.IrisTest): - def setUp(self): +@_shared_utils.skip_data +class TestPPLoadCustom: + @pytest.fixture(autouse=True) + def _setup(self, request): self.subcubes = iris.cube.CubeList() - filename = tests.get_data_path(("PP", "aPPglob1", "global.pp")) + filename = _shared_utils.get_data_path(("PP", "aPPglob1", "global.pp")) self.template = next(iris.fileformats.pp.load(filename)) + self.request = request def _field_to_cube(self, field): cube, _, _ = iris.fileformats.rules._make_cube( @@ -38,7 +40,7 @@ def test_lbtim_2(self): cube = self._field_to_cube(field) self.subcubes.append(cube) cube = self.subcubes.merge()[0] - self.assertCML(cube, ("pp_load_rules", "lbtim_2.cml")) + _shared_utils.assert_CML(self.request, cube, ("pp_load_rules", "lbtim_2.cml")) def _ocean_depth(self, bounded=False): lbuser = list(self.template.lbuser) @@ -62,16 +64,21 @@ def _ocean_depth(self, bounded=False): def test_ocean_depth(self): self._ocean_depth() cube = self.subcubes.merge()[0] - self.assertCML(cube, ("pp_load_rules", "ocean_depth.cml")) + _shared_utils.assert_CML( + self.request, cube, ("pp_load_rules", "ocean_depth.cml") + ) def test_ocean_depth_bounded(self): self._ocean_depth(bounded=True) cube = self.subcubes.merge()[0] - self.assertCML(cube, ("pp_load_rules", "ocean_depth_bounded.cml")) + _shared_utils.assert_CML( + self.request, cube, ("pp_load_rules", "ocean_depth_bounded.cml") + ) -class TestReferences(tests.IrisTest): - def setUp(self): +class TestReferences: + @pytest.fixture(autouse=True) + def _setup(self): target = iris.tests.stock.simple_2d() target.data = target.data.astype("f4") self.target = target @@ -82,7 +89,7 @@ def test_regrid_missing_coord(self): # coords, 
ensure the re-grid fails nicely - i.e. returns None. self.target.remove_coord("bar") new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) - self.assertIsNone(new_ref) + assert new_ref is None def test_regrid_codimension(self): # If the target cube has two of the source dimension coords @@ -93,48 +100,56 @@ def test_regrid_codimension(self): new_foo.rename("foo") self.target.add_aux_coord(new_foo, 0) new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) - self.assertIsNone(new_ref) + assert new_ref is None def test_regrid_identity(self): new_ref, _ = iris.fileformats.rules._ensure_aligned({}, self.ref, self.target) # Bounds don't make it through the re-grid process self.ref.coord("bar").bounds = None self.ref.coord("foo").bounds = None - self.assertEqual(new_ref, self.ref) + assert new_ref == self.ref -@tests.skip_data -class TestPPLoading(tests.IrisTest): - def test_simple(self): +@_shared_utils.skip_data +class TestPPLoading: + def test_simple(self, request): cube = iris.tests.stock.simple_pp() - self.assertCML(cube, ("cube_io", "pp", "load", "global.cml")) + _shared_utils.assert_CML(request, cube, ("cube_io", "pp", "load", "global.cml")) + +@_shared_utils.skip_data +class TestPPLoadRules: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request -@tests.skip_data -class TestPPLoadRules(tests.IrisTest): def test_pp_load_rules(self): # Test PP loading and rule evaluation. cube = iris.tests.stock.simple_pp() - self.assertCML(cube, ("pp_load_rules", "global.cml")) + _shared_utils.assert_CML(self.request, cube, ("pp_load_rules", "global.cml")) - data_path = tests.get_data_path(("PP", "rotated_uk", "rotated_uk.pp")) + data_path = _shared_utils.get_data_path(("PP", "rotated_uk", "rotated_uk.pp")) cube = iris.load(data_path)[0] - self.assertCML(cube, ("pp_load_rules", "rotated_uk.cml")) + _shared_utils.assert_CML( + self.request, cube, ("pp_load_rules", "rotated_uk.cml") + ) def test_lbproc(self): - data_path = tests.get_data_path( + data_path = _shared_utils.get_data_path( ("PP", "meanMaxMin", "200806081200__qwpb.T24.pp") ) # Set up standard name and T+24 constraint constraint = iris.Constraint("air_temperature", forecast_period=24) cubes = iris.load(data_path, constraint) cubes = iris.cube.CubeList([cubes[0], cubes[3], cubes[1], cubes[2], cubes[4]]) - self.assertCML(cubes, ("pp_load_rules", "lbproc_mean_max_min.cml")) + _shared_utils.assert_CML( + self.request, cubes, ("pp_load_rules", "lbproc_mean_max_min.cml") + ) - def test_cell_methods(self): + def test_cell_methods(self, tmp_path): # Test cell methods are created for correct values of lbproc - orig_file = tests.get_data_path(("PP", "aPPglob1", "global.pp")) + orig_file = _shared_utils.get_data_path(("PP", "aPPglob1", "global.pp")) # Values that result in cell methods being created cell_method_values = { @@ -158,7 +173,7 @@ def test_cell_methods(self): f.lbproc = value # set value # Write out pp file - temp_filename = iris.util.create_temp_filename(".pp") + temp_filename = (tmp_path / str(uuid4())).with_suffix(".pp") with open(temp_filename, "wb") as temp_fh: f.save(temp_fh) @@ -167,16 +182,14 @@ def test_cell_methods(self): if value in cell_method_values: # Check for cell method on cube - self.assertEqual(cube.cell_methods[0].method, cell_method_values[value]) + assert cube.cell_methods[0].method == cell_method_values[value] else: # Check no cell method was created for values other than 128, 4096, 8192 - self.assertEqual(len(cube.cell_methods), 0) + assert 
len(cube.cell_methods) == 0 - os.remove(temp_filename) - - def test_process_flags(self): + def test_process_flags(self, tmp_path): # Test that process flags are created for correct values of lbproc - orig_file = tests.get_data_path(("PP", "aPPglob1", "global.pp")) + orig_file = _shared_utils.get_data_path(("PP", "aPPglob1", "global.pp")) # Values that result in process flags attribute NOT being created omit_process_flags_values = (64, 128, 4096, 8192) @@ -187,7 +200,7 @@ def test_process_flags(self): f.lbproc = value # set value # Write out pp file - temp_filename = iris.util.create_temp_filename(".pp") + temp_filename = (tmp_path / str(uuid4())).with_suffix(".pp") with open(temp_filename, "wb") as temp_fh: f.save(temp_fh) @@ -196,16 +209,14 @@ def test_process_flags(self): if value in omit_process_flags_values: # Check ukmo__process_flags attribute not created - self.assertEqual(cube.attributes.get("ukmo__process_flags", None), None) + assert cube.attributes.get("ukmo__process_flags", None) is None else: # Check ukmo__process_flags attribute contains correct values - self.assertIn( - iris.fileformats.pp.lbproc_map[value], - cube.attributes["ukmo__process_flags"], + assert ( + iris.fileformats.pp.lbproc_map[value] + in cube.attributes["ukmo__process_flags"] ) - os.remove(temp_filename) - # Test multiple flag values multiple_bit_values = ((128, 32), (4096, 1024), (8192, 1024)) @@ -220,7 +231,7 @@ def test_process_flags(self): f.lbproc = sum(bit_values) # set value # Write out pp file - temp_filename = iris.util.create_temp_filename(".pp") + temp_filename = (tmp_path / str(uuid4())).with_suffix(".pp") with open(temp_filename, "wb") as temp_fh: f.save(temp_fh) @@ -228,14 +239,6 @@ def test_process_flags(self): cube = iris.load_cube(temp_filename) # Check the process flags created - self.assertEqual( - set(cube.attributes["ukmo__process_flags"]), - set(multiple_map[sum(bit_values)]), - "Mismatch between expected and actual process flags.", - ) - - os.remove(temp_filename) - - -if __name__ == "__main__": - tests.main() + assert set(cube.attributes["ukmo__process_flags"]) == set( + multiple_map[sum(bit_values)] + ), "Mismatch between expected and actual process flags." diff --git a/lib/iris/tests/test_quickplot.py b/lib/iris/tests/test_quickplot.py index 25bd8904a7..17ef68e64b 100644 --- a/lib/iris/tests/test_quickplot.py +++ b/lib/iris/tests/test_quickplot.py @@ -4,36 +4,24 @@ # See LICENSE in the root of the repository for full licensing details. """Tests the high-level plotting interface.""" -# import iris tests first so that some things can be initialised before importing anything else -import iris.tests as tests # isort:skip - import numpy as np +import pytest import iris +from iris.tests import _shared_utils import iris.tests.test_plot as test_plot # Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import matplotlib.pyplot as plt import iris.plot as iplt import iris.quickplot as qplt -# Caches _load_theta so subsequent calls are faster -def cache(fn, cache={}): - def inner(*args, **kwargs): - key = "result" - if not cache: - cache[key] = fn(*args, **kwargs) - return cache[key] - - return inner - - -@cache -def _load_theta(): - path = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) +@pytest.fixture(scope="module") +def load_theta(): + path = _shared_utils.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) theta = iris.load_cube(path, "air_potential_temperature") # Improve the unit @@ -42,17 +30,11 @@ def _load_theta(): return theta -@tests.skip_data -@tests.skip_plot +@_shared_utils.skip_data +@_shared_utils.skip_plot class TestQuickplotCoordinatesGiven(test_plot.TestPlotCoordinatesGiven): - def setUp(self): - tests.GraphicsTest.setUp(self) - filename = tests.get_data_path(("PP", "COLPEX", "theta_and_orog_subset.pp")) - self.cube = test_plot.load_cube_once(filename, "air_potential_temperature") - if self.cube.coord_dims("time") != (0,): - # A quick fix for data which has changed since we support time-varying orography - self.cube.transpose((1, 0, 2, 3)) - + @pytest.fixture(autouse=True) + def _setup(self): self.draw_module = iris.quickplot self.contourf = test_plot.LambdaStr( "iris.quickplot.contourf", @@ -105,12 +87,12 @@ def setUp(self): } -@tests.skip_data -@tests.skip_plot -class TestLabels(tests.GraphicsTest): - def setUp(self): - super().setUp() - self.theta = _load_theta() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestLabels(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self, load_theta): + self.theta = load_theta def _slice(self, coords): """Returns the first cube containing the requested coordinates.""" @@ -163,12 +145,12 @@ def test_contourf_axes_specified(self): qplt.contourf(self._small(), axes=axes1) # Ensure that the correct axes got the appropriate title. - self.assertEqual(axes2.get_title(), "This should not be changed") - self.assertEqual(axes1.get_title(), "Air potential temperature") + assert axes2.get_title() == "This should not be changed" + assert axes1.get_title() == "Air potential temperature" # Check that the axes labels were set correctly. 
- self.assertEqual(axes1.get_xlabel(), "Grid longitude / degrees") - self.assertEqual(axes1.get_ylabel(), "Altitude / m") + assert axes1.get_xlabel() == "Grid longitude / degrees" + assert axes1.get_ylabel() == "Altitude / m" def test_contourf_nameless(self): cube = self._small() @@ -231,12 +213,12 @@ def test_alignment(self): self.check_graphic() -@tests.skip_data -@tests.skip_plot -class TestTimeReferenceUnitsLabels(tests.GraphicsTest): - def setUp(self): - super().setUp() - path = tests.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp")) +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestTimeReferenceUnitsLabels(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): + path = _shared_utils.get_data_path(("PP", "aPProt1", "rotatedMHtimecube.pp")) self.cube = iris.load_cube(path)[:, 0, 0] def test_reference_time_units(self): @@ -251,13 +233,13 @@ def test_not_reference_time_units(self): self.check_graphic() -@tests.skip_data -@tests.skip_plot -class TestSubplotColorbar(tests.IrisTest): - def setUp(self): - theta = _load_theta() +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestSubplotColorbar: + @pytest.fixture(autouse=True) + def _setup(self, load_theta): coords = ["model_level_number", "grid_longitude"] - self.data = next(theta.slices(coords)) + self.data = next(load_theta.slices(coords)) spec = (1, 1, 1) self.figure1 = plt.figure() self.axes1 = self.figure1.add_subplot(*spec) @@ -265,9 +247,9 @@ def setUp(self): self.axes2 = self.figure2.add_subplot(*spec) def _check(self, mappable, figure, axes): - self.assertIs(mappable.axes, axes) - self.assertIs(mappable.colorbar.mappable, mappable) - self.assertIs(mappable.colorbar.ax.get_figure(), figure) + assert mappable.axes is axes + assert mappable.colorbar.mappable is mappable + assert mappable.colorbar.ax.get_figure() is figure def test_with_axes1(self): # plot using the first figure subplot axes (explicit) @@ -285,9 +267,9 @@ def test_without_axes__default(self): self._check(mappable, self.figure2, self.axes2) -@tests.skip_data -@tests.skip_plot -class TestPlotHist(tests.GraphicsTest): +@_shared_utils.skip_data +@_shared_utils.skip_plot +class TestPlotHist(_shared_utils.GraphicsTest): def test_horizontal(self): cube = test_plot.simple_cube()[0] qplt.hist(cube, bins=np.linspace(287.7, 288.2, 11)) @@ -297,7 +279,3 @@ def test_vertical(self): cube = test_plot.simple_cube()[0] qplt.hist(cube, bins=np.linspace(287.7, 288.2, 11), orientation="horizontal") self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/test_util.py b/lib/iris/tests/test_util.py index 56774f89f8..dabe90581c 100644 --- a/lib/iris/tests/test_util.py +++ b/lib/iris/tests/test_util.py @@ -4,30 +4,28 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test iris.util.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import inspect from io import StringIO import cf_units import numpy as np +import pytest import iris.analysis import iris.coords +from iris.tests import _shared_utils import iris.tests.stock as stock import iris.util -class TestMonotonic(tests.IrisTest): - def assertMonotonic(self, array, direction=None, **kwargs): +class TestMonotonic: + def assert_monotonic(self, array, direction=None, **kwargs): if direction is not None: mono, dir = iris.util.monotonic(array, return_direction=True, **kwargs) if not mono: - self.fail("Array was not monotonic:/n %r" % array) + pytest.fail("Array was not monotonic:/n %r" % array) if dir != np.sign(direction): - self.fail( + pytest.fail( "Array was monotonic but not in the direction expected:" "/n + requested direction: %s/n + resultant direction: %s" % (direction, dir) @@ -35,51 +33,52 @@ def assertMonotonic(self, array, direction=None, **kwargs): else: mono = iris.util.monotonic(array, **kwargs) if not mono: - self.fail("Array was not monotonic:/n %r" % array) + pytest.fail("Array was not monotonic:/n %r" % array) - def assertNotMonotonic(self, array, **kwargs): + def assert_not_monotonic(self, array, **kwargs): mono = iris.util.monotonic(array, **kwargs) if mono: - self.fail("Array was monotonic when it shouldn't be:/n %r" % array) + pytest.fail("Array was monotonic when it shouldn't be:/n %r" % array) def test_monotonic_pve(self): a = np.array([3, 4, 5.3]) - self.assertMonotonic(a) - self.assertMonotonic(a, direction=1) + self.assert_monotonic(a) + self.assert_monotonic(a, direction=1) # test the reverse for negative monotonic. a = a[::-1] - self.assertMonotonic(a) - self.assertMonotonic(a, direction=-1) + self.assert_monotonic(a) + self.assert_monotonic(a, direction=-1) def test_not_monotonic(self): b = np.array([3, 5.3, 4]) - self.assertNotMonotonic(b) + self.assert_not_monotonic(b) def test_monotonic_strict(self): b = np.array([3, 5.3, 4]) - self.assertNotMonotonic(b, strict=True) - self.assertNotMonotonic(b) + self.assert_not_monotonic(b, strict=True) + self.assert_not_monotonic(b) b = np.array([3, 5.3, 5.3]) - self.assertNotMonotonic(b, strict=True) - self.assertMonotonic(b, direction=1) + self.assert_not_monotonic(b, strict=True) + self.assert_monotonic(b, direction=1) b = b[::-1] - self.assertNotMonotonic(b, strict=True) - self.assertMonotonic(b, direction=-1) + self.assert_not_monotonic(b, strict=True) + self.assert_monotonic(b, direction=-1) b = np.array([0.0]) - self.assertRaises(ValueError, iris.util.monotonic, b) - self.assertRaises(ValueError, iris.util.monotonic, b, strict=True) + pytest.raises(ValueError, iris.util.monotonic, b) + pytest.raises(ValueError, iris.util.monotonic, b, strict=True) b = np.array([0.0, 0.0]) - self.assertNotMonotonic(b, strict=True) - self.assertMonotonic(b) + self.assert_not_monotonic(b, strict=True) + self.assert_monotonic(b) -class TestClipString(tests.IrisTest): - def setUp(self): +class TestClipString: + @pytest.fixture(autouse=True) + def _setup(self): self.test_string = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." self.rider = "**^^**$$..--__" # A good chance at being unique and not in the string to be tested! @@ -90,43 +89,32 @@ def test_oversize_string(self): result = iris.util.clip_string(self.test_string, clip_length, self.rider) # Check the length is between what we requested ( + rider length) and the length of the original string - self.assertTrue( - clip_length + len(self.rider) <= len(result) < len(self.test_string), - "String was not clipped.", - ) + fail_message = "String was not clipped." + clip_rider_len = clip_length + len(self.rider) + assert clip_rider_len <= len(result) < len(self.test_string), fail_message # Also test the rider was added - self.assertTrue( - self.rider in result, - "Rider was not added to the string when it should have been.", - ) + fail_message = "Rider was not added to the string when it should have been." + assert self.rider in result, fail_message def test_undersize_string(self): # Test with a clip length that is longer than the string clip_length = 10999 result = iris.util.clip_string(self.test_string, clip_length, self.rider) - self.assertEqual( - len(result), - len(self.test_string), - "String was clipped when it should not have been.", - ) + fail_message = "String was clipped when it should not have been." + assert len(result) == len(self.test_string), fail_message # Also test that no rider was added on the end if the string was not clipped - self.assertFalse( - self.rider in result, - "Rider was adding to the string when it should not have been.", - ) + fail_message = "Rider was adding to the string when it should not have been." + assert self.rider not in result, fail_message def test_invalid_clip_lengths(self): # Clip values less than or equal to zero are not valid for clip_length in [0, -100]: result = iris.util.clip_string(self.test_string, clip_length, self.rider) - self.assertEqual( - len(result), - len(self.test_string), - "String was clipped when it should not have been.", - ) + fail_message = "String was clipped when it should not have been." + assert len(result) == len(self.test_string), fail_message def test_default_values(self): # Get the default values specified in the function @@ -137,12 +125,10 @@ def test_default_values(self): self.test_string, arg_dict["clip_length"], arg_dict["rider"] ) - self.assertLess(len(result), len(self.test_string), "String was not clipped.") + assert len(result) < len(self.test_string), "String was not clipped." rider_returned = result[-len(arg_dict["rider"]) :] - self.assertEqual( - rider_returned, arg_dict["rider"], "Default rider was not applied." - ) + assert rider_returned == arg_dict["rider"], "Default rider was not applied." def test_trim_string_with_no_spaces(self): clip_length = 200 @@ -155,16 +141,18 @@ def test_trim_string_with_no_spaces(self): expected_length = clip_length + len(self.rider) # Check the length of the returned string is equal to clip length + length of rider - self.assertEqual( - len(result), - expected_length, + assert len(result) == expected_length, ( "Mismatch in expected length of clipped string. 
Length was %s, " - "expected value is %s" % (len(result), expected_length), + "expected value is %s" % (len(result), expected_length) ) -@tests.skip_data -class TestDescribeDiff(iris.tests.IrisTest): +@_shared_utils.skip_data +class TestDescribeDiff: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + def test_identical(self): test_cube_a = stock.realistic_4d() test_cube_b = stock.realistic_4d() @@ -173,7 +161,9 @@ def test_identical(self): iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() - self.assertString(return_str, "compatible_cubes.str.txt") + _shared_utils.assert_string( + self.request, return_str, "compatible_cubes.str.txt" + ) def test_different(self): # test incompatible attributes @@ -187,7 +177,9 @@ def test_different(self): iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() - self.assertString(return_str, "incompatible_attr.str.txt") + _shared_utils.assert_string( + self.request, return_str, "incompatible_attr.str.txt" + ) # test incompatible names test_cube_a = stock.realistic_4d() @@ -199,7 +191,9 @@ def test_different(self): iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() - self.assertString(return_str, "incompatible_name.str.txt") + _shared_utils.assert_string( + self.request, return_str, "incompatible_name.str.txt" + ) # test incompatible unit test_cube_a = stock.realistic_4d() @@ -211,7 +205,9 @@ def test_different(self): iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() - self.assertString(return_str, "incompatible_unit.str.txt") + _shared_utils.assert_string( + self.request, return_str, "incompatible_unit.str.txt" + ) # test incompatible methods test_cube_a = stock.realistic_4d() @@ -223,9 +219,11 @@ def test_different(self): iris.util.describe_diff(test_cube_a, test_cube_b, output_file=return_sio) return_str = return_sio.getvalue() - self.assertString(return_str, "incompatible_meth.str.txt") + _shared_utils.assert_string( + self.request, return_str, "incompatible_meth.str.txt" + ) - def test_output_file(self): + def test_output_file(self, tmp_path): # test incompatible attributes test_cube_a = stock.realistic_4d() test_cube_b = stock.realistic_4d().collapsed( @@ -237,13 +235,9 @@ def test_output_file(self): test_cube_a.standard_name = "relative_humidity" test_cube_a.units = cf_units.Unit("m") - with self.temp_filename() as filename: + with tmp_path / "tmp" as filename: with open(filename, "w") as f: iris.util.describe_diff(test_cube_a, test_cube_b, output_file=f) f.close() - self.assertFilesEqual(filename, "incompatible_cubes.str.txt") - - -if __name__ == "__main__": - tests.main() + _shared_utils.assert_files_equal(filename, "incompatible_cubes.str.txt") diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py index 61339b60ba..c505db8b8b 100644 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py @@ -7,24 +7,20 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - from cf_units import Unit import numpy as np +import pytest from iris.aux_factory import AtmosphereSigmaFactory from iris.coords import AuxCoord, DimCoord -class Test___init__(tests.IrisTest): - def setUp(self): - self.pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=()) + self.sigma = mocker.Mock(units=Unit("1"), nbounds=0) + self.surface_air_pressure = mocker.Mock(units=Unit("Pa"), nbounds=0) self.kwargs = dict( pressure_at_top=self.pressure_at_top, sigma=self.sigma, @@ -32,11 +28,11 @@ def setUp(self): ) def test_insufficient_coordinates_no_args(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory() def test_insufficient_coordinates_no_ptop(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory( pressure_at_top=None, sigma=self.sigma, @@ -44,7 +40,7 @@ def test_insufficient_coordinates_no_ptop(self): ) def test_insufficient_coordinates_no_sigma(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory( pressure_at_top=self.pressure_at_top, sigma=None, @@ -52,7 +48,7 @@ def test_insufficient_coordinates_no_sigma(self): ) def test_insufficient_coordinates_no_ps(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory( pressure_at_top=self.pressure_at_top, sigma=self.sigma, @@ -67,7 +63,7 @@ def test_ptop_shapes(self): def test_ptop_invalid_shapes(self): for shape in [(2,), (1, 1)]: self.pressure_at_top.shape = shape - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory(**self.kwargs) def test_sigma_bounds(self): @@ -78,7 +74,7 @@ def test_sigma_bounds(self): def test_sigma_invalid_bounds(self): for n_bounds in [-1, 1, 3]: self.sigma.nbounds = n_bounds - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory(**self.kwargs) def test_sigma_units(self): @@ -89,7 +85,7 @@ def test_sigma_units(self): def test_sigma_invalid_units(self): for units in ["Pa", "m"]: self.sigma.units = Unit(units) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory(**self.kwargs) def test_ptop_ps_units(self): @@ -102,7 +98,7 @@ def test_ptop_ps_invalid_units(self): for units in [("Pa", "1"), ("1", "Pa"), ("bar", "Pa"), ("Pa", "hPa")]: self.pressure_at_top.units = Unit(units[0]) self.surface_air_pressure.units = Unit(units[1]) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory(**self.kwargs) def test_ptop_units(self): @@ -115,27 +111,29 @@ def test_ptop_invalid_units(self): for units in ["1", "m", "kg", None]: self.pressure_at_top.units = Unit(units) self.surface_air_pressure.units = Unit(units) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): AtmosphereSigmaFactory(**self.kwargs) -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) - self.kwargs = dict( - 
pressure_at_top=self.pressure_at_top, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, +class Test_dependencies: + @pytest.fixture() + def sample_kwargs(self, mocker): + pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=()) + sigma = mocker.Mock(units=Unit("1"), nbounds=0) + surface_air_pressure = mocker.Mock(units=Unit("Pa"), nbounds=0) + kwargs = dict( + pressure_at_top=pressure_at_top, + sigma=sigma, + surface_air_pressure=surface_air_pressure, ) + return kwargs - def test_values(self): - factory = AtmosphereSigmaFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) + def test_values(self, sample_kwargs): + factory = AtmosphereSigmaFactory(**sample_kwargs) + assert factory.dependencies == sample_kwargs -class Test__derive(tests.IrisTest): +class Test__derive: def test_function_scalar(self): assert AtmosphereSigmaFactory._derive(0, 0, 0) == 0 assert AtmosphereSigmaFactory._derive(3, 0, 0) == 3 @@ -156,7 +154,7 @@ def test_function_array(self): ) -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coord_dims(coord): mapping = dict( @@ -178,7 +176,8 @@ def derive(pressure_at_top, sigma, surface_air_pressure, coord=True): ) return result - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.pressure_at_top = AuxCoord( [3.0], long_name="pressure_at_top", @@ -225,14 +224,15 @@ def test_derived_coord(self): coord.bounds = None # Check points and metadata - self.assertEqual(expected_coord, coord) + assert coord == expected_coord -class Test_update(tests.IrisTest): - def setUp(self): - self.pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=()) + self.sigma = mocker.Mock(units=Unit("1"), nbounds=0) + self.surface_air_pressure = mocker.Mock(units=Unit("Pa"), nbounds=0) self.kwargs = dict( pressure_at_top=self.pressure_at_top, sigma=self.sigma, @@ -240,41 +240,37 @@ def setUp(self): ) self.factory = AtmosphereSigmaFactory(**self.kwargs) - def test_pressure_at_top(self): - new_pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) + def test_pressure_at_top(self, mocker): + new_pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=()) self.factory.update(self.pressure_at_top, new_pressure_at_top) - self.assertIs(self.factory.pressure_at_top, new_pressure_at_top) + assert self.factory.pressure_at_top is new_pressure_at_top - def test_pressure_at_top_wrong_shape(self): - new_pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(2,)) - with self.assertRaises(ValueError): + def test_pressure_at_top_wrong_shape(self, mocker): + new_pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=(2,)) + with pytest.raises(ValueError): self.factory.update(self.pressure_at_top, new_pressure_at_top) - def test_sigma(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=0) + def test_sigma(self, mocker): + new_sigma = mocker.Mock(units=Unit("1"), nbounds=0) self.factory.update(self.sigma, new_sigma) - self.assertIs(self.factory.sigma, new_sigma) + assert self.factory.sigma is new_sigma - def test_sigma_too_many_bounds(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + def test_sigma_too_many_bounds(self, mocker): + new_sigma = 
mocker.Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) - def test_sigma_incompatible_units(self): - new_sigma = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + def test_sigma_incompatible_units(self, mocker): + new_sigma = mocker.Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) - def test_surface_air_pressure(self): - new_surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) + def test_surface_air_pressure(self, mocker): + new_surface_air_pressure = mocker.Mock(units=Unit("Pa"), nbounds=0) self.factory.update(self.surface_air_pressure, new_surface_air_pressure) - self.assertIs(self.factory.surface_air_pressure, new_surface_air_pressure) + assert self.factory.surface_air_pressure is new_surface_air_pressure - def test_surface_air_pressure_incompatible_units(self): - new_surface_air_pressure = mock.Mock(units=Unit("mbar"), nbounds=0) - with self.assertRaises(ValueError): + def test_surface_air_pressure_incompatible_units(self, mocker): + new_surface_air_pressure = mocker.Mock(units=Unit("mbar"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.surface_air_pressure, new_surface_air_pressure) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index 5e136395b5..468e5f5d5a 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -6,25 +6,23 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest import iris from iris._lazy_data import as_lazy_data, is_lazy_data from iris.aux_factory import AuxCoordFactory from iris.coords import AuxCoord +from iris.tests._shared_utils import assert_array_equal, get_data_path, skip_data -class Test__nd_points(tests.IrisTest): +class Test__nd_points: def test_numpy_scalar_coord__zero_ndim(self): points = np.array(1) coord = AuxCoord(points) result = AuxCoordFactory._nd_points(coord, (), 0) expected = np.array([1]) - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_numpy_scalar_coord(self): value = 1 @@ -32,55 +30,55 @@ def test_numpy_scalar_coord(self): coord = AuxCoord(points) result = AuxCoordFactory._nd_points(coord, (), 2) expected = np.array(value).reshape(1, 1) - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_numpy_simple(self): points = np.arange(12).reshape(4, 3) coord = AuxCoord(points) result = AuxCoordFactory._nd_points(coord, (0, 1), 2) expected = points - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_numpy_complex(self): points = np.arange(12).reshape(4, 3) coord = AuxCoord(points) result = AuxCoordFactory._nd_points(coord, (3, 2), 5) expected = points.T[np.newaxis, np.newaxis, ..., np.newaxis] - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_lazy_simple(self): raw_points = np.arange(12).reshape(4, 3) points = as_lazy_data(raw_points, raw_points.shape) coord = AuxCoord(points) - self.assertTrue(is_lazy_data(coord.core_points())) + assert is_lazy_data(coord.core_points()) result = AuxCoordFactory._nd_points(coord, (0, 1), 2) # Check we haven't triggered the loading 
of the coordinate values. - self.assertTrue(is_lazy_data(coord.core_points())) - self.assertTrue(is_lazy_data(result)) + assert is_lazy_data(coord.core_points()) + assert is_lazy_data(result) expected = raw_points - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_lazy_complex(self): raw_points = np.arange(12).reshape(4, 3) points = as_lazy_data(raw_points, raw_points.shape) coord = AuxCoord(points) - self.assertTrue(is_lazy_data(coord.core_points())) + assert is_lazy_data(coord.core_points()) result = AuxCoordFactory._nd_points(coord, (3, 2), 5) # Check we haven't triggered the loading of the coordinate values. - self.assertTrue(is_lazy_data(coord.core_points())) - self.assertTrue(is_lazy_data(result)) + assert is_lazy_data(coord.core_points()) + assert is_lazy_data(result) expected = raw_points.T[np.newaxis, np.newaxis, ..., np.newaxis] - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) -class Test__nd_bounds(tests.IrisTest): +class Test__nd_bounds: def test_numpy_scalar_coord__zero_ndim(self): points = np.array(0.5) bounds = np.arange(2) coord = AuxCoord(points, bounds=bounds) result = AuxCoordFactory._nd_bounds(coord, (), 0) expected = bounds - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_numpy_scalar_coord(self): points = np.array(0.5) @@ -88,7 +86,7 @@ def test_numpy_scalar_coord(self): coord = AuxCoord(points, bounds=bounds) result = AuxCoordFactory._nd_bounds(coord, (), 2) expected = bounds[np.newaxis] - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_numpy_simple(self): points = np.arange(12).reshape(4, 3) @@ -96,7 +94,7 @@ def test_numpy_simple(self): coord = AuxCoord(points, bounds=bounds) result = AuxCoordFactory._nd_bounds(coord, (0, 1), 2) expected = bounds - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_numpy_complex(self): points = np.arange(12).reshape(4, 3) @@ -104,7 +102,7 @@ def test_numpy_complex(self): coord = AuxCoord(points, bounds=bounds) result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5) expected = bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2) - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_lazy_simple(self): raw_points = np.arange(12).reshape(4, 3) @@ -112,13 +110,13 @@ def test_lazy_simple(self): raw_bounds = np.arange(24).reshape(4, 3, 2) bounds = as_lazy_data(raw_bounds, raw_bounds.shape) coord = AuxCoord(points, bounds=bounds) - self.assertTrue(is_lazy_data(coord.core_bounds())) + assert is_lazy_data(coord.core_bounds()) result = AuxCoordFactory._nd_bounds(coord, (0, 1), 2) # Check we haven't triggered the loading of the coordinate values. - self.assertTrue(is_lazy_data(coord.core_bounds())) - self.assertTrue(is_lazy_data(result)) + assert is_lazy_data(coord.core_bounds()) + assert is_lazy_data(result) expected = raw_bounds - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) def test_lazy_complex(self): raw_points = np.arange(12).reshape(4, 3) @@ -126,39 +124,37 @@ def test_lazy_complex(self): raw_bounds = np.arange(24).reshape(4, 3, 2) bounds = as_lazy_data(raw_bounds, raw_bounds.shape) coord = AuxCoord(points, bounds=bounds) - self.assertTrue(is_lazy_data(coord.core_bounds())) + assert is_lazy_data(coord.core_bounds()) result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5) # Check we haven't triggered the loading of the coordinate values. 
- self.assertTrue(is_lazy_data(coord.core_bounds())) - self.assertTrue(is_lazy_data(result)) + assert is_lazy_data(coord.core_bounds()) + assert is_lazy_data(result) expected = raw_bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2) - self.assertArrayEqual(result, expected) + assert_array_equal(result, expected) -@tests.skip_data -class Test_lazy_aux_coords(tests.IrisTest): - def setUp(self): - path = tests.get_data_path(["NetCDF", "testing", "small_theta_colpex.nc"]) +@skip_data +class Test_lazy_aux_coords: + @pytest.fixture() + def sample_cube(self, mocker): + path = get_data_path(["NetCDF", "testing", "small_theta_colpex.nc"]) # While loading, "turn off" loading small variables as real data. - with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): - self.cube = iris.load_cube(path, "air_potential_temperature") + mocker.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0) + cube = iris.load_cube(path, "air_potential_temperature") + return cube - def _check_lazy(self): - coords = self.cube.aux_coords + self.cube.derived_coords + def _check_lazy(self, cube): + coords = cube.aux_coords + cube.derived_coords for coord in coords: - self.assertTrue(coord.has_lazy_points()) + assert coord.has_lazy_points() if coord.has_bounds(): - self.assertTrue(coord.has_lazy_bounds()) + assert coord.has_lazy_bounds() - def test_lazy_coord_loading(self): + def test_lazy_coord_loading(self, sample_cube): # Test that points and bounds arrays stay lazy upon cube loading. - self._check_lazy() + self._check_lazy(sample_cube) - def test_lazy_coord_printing(self): + def test_lazy_coord_printing(self, sample_cube): # Test that points and bounds arrays stay lazy after cube printing. - _ = str(self.cube) - self._check_lazy() - - -if __name__ == "__main__": - tests.main() + _ = str(sample_cube) + self._check_lazy(sample_cube) diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index d352cf663b..e3caf0c114 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -7,33 +7,40 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock +from unittest.mock import Mock import cf_units import numpy as np +import pytest import iris from iris.aux_factory import HybridPressureFactory -class Test___init__(tests.IrisTest): - def setUp(self): - self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) +def create_default_sample_parts(self): + self.delta = Mock(units=cf_units.Unit("Pa"), nbounds=0) + self.sigma = Mock(units=cf_units.Unit("1"), nbounds=0) + self.surface_air_pressure = Mock(units=cf_units.Unit("Pa"), nbounds=0) + self.factory = HybridPressureFactory( + delta=self.delta, + sigma=self.sigma, + surface_air_pressure=self.surface_air_pressure, + ) + + +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): + create_default_sample_parts(self) def test_insufficient_coords(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=None, sigma=self.sigma, surface_air_pressure=None ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=None, sigma=None, @@ -42,7 +49,7 @@ def test_insufficient_coords(self): def test_incompatible_delta_units(self): self.delta.units = cf_units.Unit("m") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -51,7 +58,7 @@ def test_incompatible_delta_units(self): def test_incompatible_sigma_units(self): self.sigma.units = cf_units.Unit("Pa") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -60,7 +67,7 @@ def test_incompatible_sigma_units(self): def test_incompatible_surface_air_pressure_units(self): self.surface_air_pressure.units = cf_units.Unit("unknown") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -70,7 +77,7 @@ def test_incompatible_surface_air_pressure_units(self): def test_different_pressure_units(self): self.delta.units = cf_units.Unit("hPa") self.surface_air_pressure.units = cf_units.Unit("Pa") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -79,7 +86,7 @@ def test_different_pressure_units(self): def test_too_many_delta_bounds(self): self.delta.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -88,7 +95,7 @@ def test_too_many_delta_bounds(self): def test_too_many_sigma_bounds(self): self.sigma.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -101,29 +108,28 @@ def test_factory_metadata(self): sigma=self.sigma, surface_air_pressure=self.surface_air_pressure, ) - self.assertEqual(factory.standard_name, "air_pressure") - self.assertIsNone(factory.long_name) - self.assertIsNone(factory.var_name) - self.assertEqual(factory.units, self.delta.units) - self.assertEqual(factory.units, self.surface_air_pressure.units) - self.assertIsNone(factory.coord_system) - self.assertEqual(factory.attributes, {}) + assert factory.standard_name == "air_pressure" + assert 
factory.long_name is None + assert factory.var_name is None + assert factory.units == self.delta.units + assert factory.units == self.surface_air_pressure.units + assert factory.coord_system is None + assert factory.attributes == {} def test_promote_sigma_units_unknown_to_dimensionless(self): - sigma = mock.Mock(units=cf_units.Unit("unknown"), nbounds=0) + sigma = Mock(units=cf_units.Unit("unknown"), nbounds=0) factory = HybridPressureFactory( delta=self.delta, sigma=sigma, surface_air_pressure=self.surface_air_pressure, ) - self.assertEqual("1", factory.dependencies["sigma"].units) + assert factory.dependencies["sigma"].units == "1" -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) +class Test_dependencies: + @pytest.fixture(autouse=True) + def _setup(self): + create_default_sample_parts(self) def test_value(self): kwargs = dict( @@ -132,16 +138,18 @@ def test_value(self): surface_air_pressure=self.surface_air_pressure, ) factory = HybridPressureFactory(**kwargs) - self.assertEqual(factory.dependencies, kwargs) + assert factory.dependencies == kwargs -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coords_dims_func(coord): mapping = dict(level_pressure=(0,), sigma=(0,), surface_air_pressure=(1, 2)) return mapping[coord.name()] - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): + # Create standard data objects for coord testing self.delta = iris.coords.DimCoord( [0.0, 1.0, 2.0], long_name="level_pressure", units="Pa" ) @@ -166,7 +174,7 @@ def test_points_only(self): surface_air_pressure=self.surface_air_pressure, ) derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) + assert derived_coord == expected_coord def test_none_delta(self): delta_pts = 0 @@ -177,10 +185,11 @@ def test_none_delta(self): expected_points, standard_name="air_pressure", units="Pa" ) factory = HybridPressureFactory( - sigma=self.sigma, surface_air_pressure=self.surface_air_pressure + sigma=self.sigma, + surface_air_pressure=self.surface_air_pressure, ) derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) + assert derived_coord == expected_coord def test_none_sigma(self): delta_pts = self.delta.points[..., np.newaxis, np.newaxis] @@ -191,10 +200,11 @@ def test_none_sigma(self): expected_points, standard_name="air_pressure", units="Pa" ) factory = HybridPressureFactory( - delta=self.delta, surface_air_pressure=self.surface_air_pressure + delta=self.delta, + surface_air_pressure=self.surface_air_pressure, ) derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) + assert derived_coord == expected_coord def test_none_surface_air_pressure(self): # Note absence of broadcasting as multidimensional coord @@ -205,7 +215,7 @@ def test_none_surface_air_pressure(self): ) factory = HybridPressureFactory(delta=self.delta, sigma=self.sigma) derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) + assert derived_coord == expected_coord def test_with_bounds(self): self.delta.guess_bounds(0) @@ -232,66 +242,55 @@ def test_with_bounds(self): surface_air_pressure=self.surface_air_pressure, ) derived_coord = factory.make_coord(self.coords_dims_func) - 
self.assertEqual(expected_coord, derived_coord) + assert derived_coord == expected_coord -class Test_update(tests.IrisTest): - def setUp(self): - self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - - self.factory = HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self): + create_default_sample_parts(self) def test_good_delta(self): - new_delta_coord = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) + new_delta_coord = Mock(units=cf_units.Unit("Pa"), nbounds=0) self.factory.update(self.delta, new_delta_coord) - self.assertIs(self.factory.delta, new_delta_coord) + assert self.factory.delta is new_delta_coord def test_bad_delta(self): - new_delta_coord = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - with self.assertRaises(ValueError): + new_delta_coord = Mock(units=cf_units.Unit("1"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.delta, new_delta_coord) def test_alternative_bad_delta(self): - new_delta_coord = mock.Mock(units=cf_units.Unit("Pa"), nbounds=4) - with self.assertRaises(ValueError): + new_delta_coord = Mock(units=cf_units.Unit("Pa"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.delta, new_delta_coord) def test_good_surface_air_pressure(self): - new_surface_p_coord = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) + new_surface_p_coord = Mock(units=cf_units.Unit("Pa"), nbounds=0) self.factory.update(self.surface_air_pressure, new_surface_p_coord) - self.assertIs(self.factory.surface_air_pressure, new_surface_p_coord) + assert self.factory.surface_air_pressure is new_surface_p_coord def test_bad_surface_air_pressure(self): - new_surface_p_coord = mock.Mock(units=cf_units.Unit("km"), nbounds=0) - with self.assertRaises(ValueError): + new_surface_p_coord = Mock(units=cf_units.Unit("km"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.surface_air_pressure, new_surface_p_coord) def test_non_dependency(self): - old_coord = mock.Mock() - new_coord = mock.Mock() + old_coord = Mock() + new_coord = Mock() orig_dependencies = self.factory.dependencies self.factory.update(old_coord, new_coord) - self.assertEqual(orig_dependencies, self.factory.dependencies) + assert self.factory.dependencies == orig_dependencies def test_none_delta(self): self.factory.update(self.delta, None) - self.assertIsNone(self.factory.delta) + assert self.factory.delta is None def test_none_sigma(self): self.factory.update(self.sigma, None) - self.assertIsNone(self.factory.sigma) + assert self.factory.sigma is None def test_insufficient_coords(self): self.factory.update(self.delta, None) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.factory.update(self.surface_air_pressure, None) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index e0955842bd..e702b8d2e2 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -7,27 +7,25 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock +from unittest.mock import Mock from cf_units import Unit import numpy as np +import pytest from iris.aux_factory import OceanSFactory from iris.coords import AuxCoord, DimCoord -class Test___init__(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.a = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.b = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, eta=self.eta, @@ -38,9 +36,9 @@ def setUp(self): ) def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory( s=None, eta=self.eta, @@ -49,7 +47,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory( s=self.s, eta=None, @@ -58,7 +56,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory( s=self.s, eta=self.eta, @@ -67,7 +65,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory( s=self.s, eta=self.eta, @@ -76,7 +74,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory( s=self.s, eta=self.eta, @@ -85,7 +83,7 @@ def test_insufficient_coordinates(self): b=None, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory( s=self.s, eta=self.eta, @@ -97,59 +95,60 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_a_non_scalar(self): self.a.shape = (2,) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_b_non_scalar(self): self.b.shape = (2,) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def 
test_depth_incompatible_units(self): self.depth.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSFactory(**self.kwargs) def test_promote_s_units_unknown_to_dimensionless(self): - s = mock.Mock(units=Unit("unknown"), nbounds=0) + s = Mock(units=Unit("unknown"), nbounds=0) self.kwargs["s"] = s factory = OceanSFactory(**self.kwargs) - self.assertEqual("1", factory.dependencies["s"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + assert factory.dependencies["s"].units == "1" + + +class Test_dependencies: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.a = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.b = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, eta=self.eta, @@ -161,10 +160,10 @@ def setUp(self): def test_values(self): factory = OceanSFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) + assert factory.dependencies == self.kwargs -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coord_dims(coord): mapping = dict(s=(0,), eta=(1, 2), depth=(1, 2), a=(), b=(), depth_c=()) @@ -186,7 +185,8 @@ def derive(s, eta, depth, a, b, depth_c, coord=True): ) return result - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.s = DimCoord( np.arange(-0.975, 0, 0.05, dtype=float), units="1", long_name="s" ) @@ -225,17 +225,18 @@ def test_derived_points(self): # Calculate the actual result. 
factory = OceanSFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + assert coord == expected_coord + + +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.a = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.b = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, eta=self.eta, @@ -247,75 +248,71 @@ def setUp(self): self.factory = OceanSFactory(**self.kwargs) def test_s(self): - new_s = mock.Mock(units=Unit("1"), nbounds=0) + new_s = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.s, new_s) - self.assertIs(self.factory.s, new_s) + assert self.factory.s is new_s def test_s_too_many_bounds(self): - new_s = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_s = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.s, new_s) def test_s_incompatible_units(self): - new_s = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_s = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.s, new_s) def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) + new_eta = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) + assert self.factory.eta is new_eta def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_eta = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.eta, new_eta) def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) + new_depth = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) + assert self.factory.depth is new_depth def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_depth = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.depth, new_depth) def test_a(self): - new_a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) + new_a = Mock(units=Unit("1"), nbounds=0, shape=(1,)) self.factory.update(self.a, new_a) - self.assertIs(self.factory.a, new_a) + assert self.factory.a is new_a def test_a_non_scalar(self): - new_a = mock.Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_a = Mock(units=Unit("1"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.a, new_a) def test_b(self): - new_b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) + new_b = Mock(units=Unit("1"), nbounds=0, shape=(1,)) self.factory.update(self.b, new_b) - self.assertIs(self.factory.b, new_b) + assert self.factory.b is new_b def test_b_non_scalar(self): - new_b = mock.Mock(units=Unit("1"), 
nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_b = Mock(units=Unit("1"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.b, new_b) def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, new_depth_c) + assert self.factory.depth_c is new_depth_c def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 7cb42f7274..82e7cd2a7b 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -7,26 +7,24 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +from unittest.mock import Mock from cf_units import Unit import numpy as np +import pytest from iris.aux_factory import OceanSg1Factory from iris.coords import AuxCoord, DimCoord -class Test___init__(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.c = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, c=self.c, @@ -36,9 +34,9 @@ def setUp(self): ) def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory( s=None, c=self.c, @@ -46,7 +44,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory( s=self.s, c=None, @@ -54,7 +52,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory( s=self.s, c=self.c, @@ -62,7 +60,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory( s=self.s, c=self.c, @@ -70,7 +68,7 @@ def test_insufficient_coordinates(self): depth=None, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): 
OceanSg1Factory( s=self.s, c=self.c, @@ -81,61 +79,62 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_c_too_many_bounds(self): self.c.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_c_incompatible_units(self): self.c.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg1Factory(**self.kwargs) def test_promote_c_and_s_units_unknown_to_dimensionless(self): - c = mock.Mock(units=Unit("unknown"), nbounds=0) - s = mock.Mock(units=Unit("unknown"), nbounds=0) + c = Mock(units=Unit("unknown"), nbounds=0) + s = Mock(units=Unit("unknown"), nbounds=0) self.kwargs["c"] = c self.kwargs["s"] = s factory = OceanSg1Factory(**self.kwargs) - self.assertEqual("1", factory.dependencies["c"].units) - self.assertEqual("1", factory.dependencies["s"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + assert factory.dependencies["c"].units == "1" + assert factory.dependencies["s"].units == "1" + + +class Test_dependencies: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.c = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, c=self.c, @@ -146,10 +145,10 @@ def setUp(self): def test_values(self): factory = OceanSg1Factory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) + assert factory.dependencies == self.kwargs -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coord_dims(coord): mapping = dict(s=(0,), c=(0,), eta=(1, 2), depth=(1, 2), depth_c=()) @@ -169,7 +168,8 @@ def derive(s, c, eta, depth, depth_c, coord=True): ) return result - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.s = DimCoord(np.linspace(-0.985, -0.014, 36), units="1", long_name="s") self.c = DimCoord(np.linspace(-0.959, -0.001, 36), units="1", long_name="c") self.eta = AuxCoord( @@ -203,16 +203,17 @@ def test_derived_points(self): # Calculate the actual result. 
factory = OceanSg1Factory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord -class Test_update(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.c = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, c=self.c, @@ -223,70 +224,66 @@ def setUp(self): self.factory = OceanSg1Factory(**self.kwargs) def test_s(self): - new_s = mock.Mock(units=Unit("1"), nbounds=0) + new_s = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.s, new_s) - self.assertIs(self.factory.s, new_s) + assert self.factory.s is new_s def test_c(self): - new_c = mock.Mock(units=Unit("1"), nbounds=0) + new_c = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.c, new_c) - self.assertIs(self.factory.c, new_c) + assert self.factory.c is new_c def test_s_too_many_bounds(self): - new_s = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_s = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.s, new_s) def test_c_too_many_bounds(self): - new_c = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_c = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.c, new_c) def test_s_incompatible_units(self): - new_s = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_s = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.s, new_s) def test_c_incompatible_units(self): - new_c = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_c = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.c, new_c) def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) + new_eta = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) + assert self.factory.eta is new_eta def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_eta = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.eta, new_eta) def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) + new_depth = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) + assert self.factory.depth is new_depth def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_depth = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.depth, new_depth) def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, 
new_depth_c) + assert self.factory.depth_c is new_depth_c def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index c3b5a3df1b..ecb8593e99 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -7,26 +7,24 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +from unittest.mock import Mock from cf_units import Unit import numpy as np +import pytest from iris.aux_factory import OceanSg2Factory from iris.coords import AuxCoord, DimCoord -class Test___init__(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.c = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, c=self.c, @@ -36,9 +34,9 @@ def setUp(self): ) def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory( s=None, c=self.c, @@ -46,7 +44,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory( s=self.s, c=None, @@ -54,7 +52,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory( s=self.s, c=self.c, @@ -62,7 +60,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory( s=self.s, c=self.c, @@ -70,7 +68,7 @@ def test_insufficient_coordinates(self): depth=None, depth_c=self.depth_c, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory( s=self.s, c=self.c, @@ -81,61 +79,62 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_c_too_many_bounds(self): self.c.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def 
test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_c_incompatible_units(self): self.c.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSg2Factory(**self.kwargs) def test_promote_c_and_s_units_unknown_to_dimensionless(self): - c = mock.Mock(units=Unit("unknown"), nbounds=0) - s = mock.Mock(units=Unit("unknown"), nbounds=0) + c = Mock(units=Unit("unknown"), nbounds=0) + s = Mock(units=Unit("unknown"), nbounds=0) self.kwargs["c"] = c self.kwargs["s"] = s factory = OceanSg2Factory(**self.kwargs) - self.assertEqual("1", factory.dependencies["c"].units) - self.assertEqual("1", factory.dependencies["s"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + assert factory.dependencies["c"].units == "1" + assert factory.dependencies["s"].units == "1" + + +class Test_dependencies: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.c = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, c=self.c, @@ -146,10 +145,10 @@ def setUp(self): def test_values(self): factory = OceanSg2Factory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) + assert factory.dependencies == self.kwargs -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coord_dims(coord): mapping = dict(s=(0,), c=(0,), eta=(1, 2), depth=(1, 2), depth_c=()) @@ -169,7 +168,8 @@ def derive(s, c, eta, depth, depth_c, coord=True): ) return result - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.s = DimCoord(np.linspace(-0.985, -0.014, 36), units="1", long_name="s") self.c = DimCoord(np.linspace(-0.959, -0.001, 36), units="1", long_name="c") self.eta = AuxCoord( @@ -203,16 +203,17 @@ def test_derived_points(self): # Calculate the actual result. 
factory = OceanSg2Factory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord -class Test_update(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self): + self.s = Mock(units=Unit("1"), nbounds=0) + self.c = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.kwargs = dict( s=self.s, c=self.c, @@ -223,70 +224,66 @@ def setUp(self): self.factory = OceanSg2Factory(**self.kwargs) def test_s(self): - new_s = mock.Mock(units=Unit("1"), nbounds=0) + new_s = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.s, new_s) - self.assertIs(self.factory.s, new_s) + assert self.factory.s is new_s def test_c(self): - new_c = mock.Mock(units=Unit("1"), nbounds=0) + new_c = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.c, new_c) - self.assertIs(self.factory.c, new_c) + assert self.factory.c is new_c def test_s_too_many_bounds(self): - new_s = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_s = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.s, new_s) def test_c_too_many_bounds(self): - new_c = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_c = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.c, new_c) def test_s_incompatible_units(self): - new_s = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_s = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.s, new_s) def test_c_incompatible_units(self): - new_c = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_c = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.c, new_c) def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) + new_eta = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) + assert self.factory.eta is new_eta def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_eta = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.eta, new_eta) def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) + new_depth = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) + assert self.factory.depth is new_depth def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_depth = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.depth, new_depth) def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, 
new_depth_c) + assert self.factory.depth_c is new_depth_c def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index e8bf322305..910e897590 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -7,76 +7,75 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +from unittest.mock import Mock from cf_units import Unit import numpy as np +import pytest from iris.aux_factory import OceanSigmaFactory from iris.coords import AuxCoord, DimCoord -class Test___init__(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): + self.sigma = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(sigma=None, eta=self.eta, depth=self.depth) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(sigma=self.sigma, eta=None, depth=self.depth) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(sigma=self.sigma, eta=self.eta, depth=None) def test_sigma_too_many_bounds(self): self.sigma.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(**self.kwargs) def test_sigma_incompatible_units(self): self.sigma.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaFactory(**self.kwargs) def test_promote_sigma_units_unknown_to_dimensionless(self): - sigma = mock.Mock(units=Unit("unknown"), nbounds=0) + sigma = Mock(units=Unit("unknown"), nbounds=0) self.kwargs["sigma"] = sigma factory = OceanSigmaFactory(**self.kwargs) - self.assertEqual("1", factory.dependencies["sigma"].units) + assert factory.dependencies["sigma"].units == "1" -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = 
mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) +class Test_dependencies: + @pytest.fixture(autouse=True) + def _setup(self): + self.sigma = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) def test_values(self): factory = OceanSigmaFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) + assert factory.dependencies == self.kwargs -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coord_dims(coord): mapping = dict(sigma=(0,), eta=(1, 2), depth=(1, 2)) @@ -95,7 +94,8 @@ def derive(sigma, eta, depth, coord=True): ) return result - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.sigma = DimCoord(np.linspace(-0.05, -1, 5), long_name="sigma", units="1") self.eta = AuxCoord( np.arange(-1, 3, dtype=np.float64).reshape(2, 2), @@ -119,52 +119,49 @@ def test_derived_points(self): # Calculate the actual result. factory = OceanSigmaFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord -class Test_update(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self): + self.sigma = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) self.factory = OceanSigmaFactory(**self.kwargs) def test_sigma(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=0) + new_sigma = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.sigma, new_sigma) - self.assertIs(self.factory.sigma, new_sigma) + assert self.factory.sigma is new_sigma def test_sigma_too_many_bounds(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_sigma = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self): - new_sigma = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_sigma = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) + new_eta = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) + assert self.factory.eta is new_eta def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_eta = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.eta, new_eta) def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) + new_depth = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) + assert self.factory.depth is new_depth def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_depth = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.depth, new_depth) - - -if __name__ 
== "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index 604daa9419..56991c01d9 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -7,27 +7,25 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +from unittest.mock import Mock from cf_units import Unit import numpy as np +import pytest from iris.aux_factory import OceanSigmaZFactory from iris.coords import AuxCoord, DimCoord -class Test___init__(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.zlev = mock.Mock(units=Unit("m"), nbounds=0) +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): + self.sigma = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) + self.nsigma = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.zlev = Mock(units=Unit("m"), nbounds=0) self.kwargs = dict( sigma=self.sigma, eta=self.eta, @@ -38,9 +36,9 @@ def setUp(self): ) def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory( sigma=self.sigma, eta=self.eta, @@ -49,7 +47,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=None, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory( sigma=None, eta=None, @@ -58,7 +56,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory( sigma=self.sigma, eta=None, @@ -67,7 +65,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory( sigma=self.sigma, eta=None, @@ -76,7 +74,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory( sigma=self.sigma, eta=self.eta, @@ -88,69 +86,70 @@ def test_insufficient_coordinates(self): def test_sigma_too_many_bounds(self): self.sigma.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_zlev_too_many_bounds(self): self.zlev.nbounds = 4 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_sigma_zlev_same_boundedness(self): self.zlev.nbounds = 2 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_nsigma_non_scalar(self): self.nsigma.shape = (4,) - with self.assertRaises(ValueError): + with 
pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_zlev_incompatible_units(self): self.zlev.units = Unit("Pa") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_sigma_incompatible_units(self): self.sigma.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): OceanSigmaZFactory(**self.kwargs) def test_promote_sigma_units_unknown_to_dimensionless(self): - sigma = mock.Mock(units=Unit("unknown"), nbounds=0) + sigma = Mock(units=Unit("unknown"), nbounds=0) self.kwargs["sigma"] = sigma factory = OceanSigmaZFactory(**self.kwargs) - self.assertEqual("1", factory.dependencies["sigma"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.zlev = mock.Mock(units=Unit("m"), nbounds=0) + assert factory.dependencies["sigma"].units == "1" + + +class Test_dependencies: + @pytest.fixture(autouse=True) + def _setup(self): + self.sigma = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) + self.nsigma = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.zlev = Mock(units=Unit("m"), nbounds=0) self.kwargs = dict( sigma=self.sigma, eta=self.eta, @@ -162,10 +161,10 @@ def setUp(self): def test_values(self): factory = OceanSigmaZFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) + assert factory.dependencies == self.kwargs -class Test_make_coord(tests.IrisTest): +class Test_make_coord: @staticmethod def coord_dims(coord): mapping = dict( @@ -195,7 +194,8 @@ def derive(sigma, eta, depth, depth_c, nsigma, zlev, coord=True): ) return result - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.sigma = DimCoord( np.arange(5, dtype=np.float64) * 10, long_name="sigma", units="1" ) @@ -236,7 +236,7 @@ def test_derived_points(self): # Calculate the actual result. factory = OceanSigmaZFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord def test_derived_points_with_bounds(self): self.sigma.guess_bounds() @@ -263,7 +263,7 @@ def test_derived_points_with_bounds(self): # Calculate the actual result. factory = OceanSigmaZFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord def test_no_eta(self): # Broadcast expected points given the known dimensional mapping. 
@@ -279,7 +279,7 @@ def test_no_eta(self): self.kwargs["eta"] = None factory = OceanSigmaZFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord def test_no_sigma(self): # Broadcast expected points given the known dimensional mapping. @@ -295,7 +295,7 @@ def test_no_sigma(self): self.kwargs["sigma"] = None factory = OceanSigmaZFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord def test_no_depth_c(self): # Broadcast expected points given the known dimensional mapping. @@ -311,7 +311,7 @@ def test_no_depth_c(self): self.kwargs["depth_c"] = None factory = OceanSigmaZFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) + assert coord == expected_coord def test_no_depth(self): # Broadcast expected points given the known dimensional mapping. @@ -327,17 +327,18 @@ def test_no_depth(self): self.kwargs["depth"] = None factory = OceanSigmaZFactory(**self.kwargs) coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.zlev = mock.Mock(units=Unit("m"), nbounds=0) + assert coord == expected_coord + + +class Test_update: + @pytest.fixture(autouse=True) + def _setup(self): + self.sigma = Mock(units=Unit("1"), nbounds=0) + self.eta = Mock(units=Unit("m"), nbounds=0) + self.depth = Mock(units=Unit("m"), nbounds=0) + self.depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) + self.nsigma = Mock(units=Unit("1"), nbounds=0, shape=(1,)) + self.zlev = Mock(units=Unit("m"), nbounds=0) self.kwargs = dict( sigma=self.sigma, eta=self.eta, @@ -349,98 +350,94 @@ def setUp(self): self.factory = OceanSigmaZFactory(**self.kwargs) def test_sigma(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=0) + new_sigma = Mock(units=Unit("1"), nbounds=0) self.factory.update(self.sigma, new_sigma) - self.assertIs(self.factory.sigma, new_sigma) + assert self.factory.sigma is new_sigma def test_sigma_too_many_bounds(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): + new_sigma = Mock(units=Unit("1"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) def test_sigma_zlev_same_boundedness(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=2) - with self.assertRaises(ValueError): + new_sigma = Mock(units=Unit("1"), nbounds=2) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self): - new_sigma = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_sigma = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.sigma, new_sigma) def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) + new_eta = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) + assert self.factory.eta is new_eta def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_eta = 
Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.eta, new_eta) def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) + new_depth = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) + assert self.factory.depth is new_depth def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_depth = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.depth, new_depth) def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(1,)) self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, new_depth_c) + assert self.factory.depth_c is new_depth_c def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): + new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) + with pytest.raises(ValueError): self.factory.update(self.depth_c, new_depth_c) def test_nsigma(self): - new_nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) + new_nsigma = Mock(units=Unit("1"), nbounds=0, shape=(1,)) self.factory.update(self.nsigma, new_nsigma) - self.assertIs(self.factory.nsigma, new_nsigma) + assert self.factory.nsigma is new_nsigma def test_nsigma_missing(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.factory.update(self.nsigma, None) def test_nsigma_non_scalar(self): - new_nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): + new_nsigma = Mock(units=Unit("1"), nbounds=0, shape=(10,)) + with pytest.raises(ValueError): self.factory.update(self.nsigma, new_nsigma) def test_zlev(self): - new_zlev = mock.Mock(units=Unit("m"), nbounds=0) + new_zlev = Mock(units=Unit("m"), nbounds=0) self.factory.update(self.zlev, new_zlev) - self.assertIs(self.factory.zlev, new_zlev) + assert self.factory.zlev is new_zlev def test_zlev_missing(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.factory.update(self.zlev, None) def test_zlev_too_many_bounds(self): - new_zlev = mock.Mock(units=Unit("m"), nbounds=4) - with self.assertRaises(ValueError): + new_zlev = Mock(units=Unit("m"), nbounds=4) + with pytest.raises(ValueError): self.factory.update(self.zlev, new_zlev) def test_zlev_same_boundedness(self): - new_zlev = mock.Mock(units=Unit("m"), nbounds=2) - with self.assertRaises(ValueError): + new_zlev = Mock(units=Unit("m"), nbounds=2) + with pytest.raises(ValueError): self.factory.update(self.zlev, new_zlev) def test_zlev_incompatible_units(self): - new_zlev = new_zlev = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): + new_zlev = new_zlev = Mock(units=Unit("Pa"), nbounds=0) + with pytest.raises(ValueError): self.factory.update(self.zlev, new_zlev) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index 375a745ce8..cbc1c8fe1f 100644 --- 
a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -4,179 +4,166 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.lenient.Lenient`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest -from unittest.mock import sentinel +from iris.common.lenient import _LENIENT, _LENIENT_PROTECTED, Lenient -from iris.common.lenient import _LENIENT, Lenient +@pytest.fixture() +def lenient(): + # setup + state = {key: _LENIENT.__dict__[key] for key in _LENIENT_PROTECTED} + # call + yield Lenient() + # teardown + for key, value in state.items(): + _LENIENT.__dict__[key] = value -class Test___init__(tests.IrisTest): - def test_default(self): - lenient = Lenient() + +class Test___init__: + def test_default(self, lenient): expected = dict(maths=True) - self.assertEqual(expected, lenient.__dict__) + assert lenient.__dict__ == expected - def test_kwargs(self): - lenient = Lenient(maths=False) + def test_kwargs(self, lenient): + actual = Lenient(maths=False) expected = dict(maths=False) - self.assertEqual(expected, lenient.__dict__) + assert actual.__dict__ == expected - def test_kwargs_invalid(self): + def test_kwargs_invalid(self, lenient): emsg = "Invalid .* option, got 'merge'." - with self.assertRaisesRegex(KeyError, emsg): + with pytest.raises(KeyError, match=emsg): _ = Lenient(merge=True) -class Test___contains__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test_in(self): - self.assertIn("maths", self.lenient) +class Test___contains__: + def test_in(self, lenient): + assert "maths" in lenient - def test_not_in(self): - self.assertNotIn("concatenate", self.lenient) + def test_not_in(self, lenient): + assert "concatenate" not in lenient -class Test___getitem__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() +class Test___getitem__: + def test_in(self, lenient): + assert bool(lenient["maths"]) is True - def test_in(self): - self.assertTrue(self.lenient["maths"]) - - def test_not_in(self): + def test_not_in(self, lenient): emsg = "Invalid .* option, got 'MATHS'." - with self.assertRaisesRegex(KeyError, emsg): - _ = self.lenient["MATHS"] - + with pytest.raises(KeyError, match=emsg): + _ = lenient["MATHS"] -class Test___repr__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - def test(self): +class Test___repr__: + def test(self, lenient): expected = "Lenient(maths=True)" - self.assertEqual(expected, repr(self.lenient)) + assert repr(lenient) == expected -class Test___setitem__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test_key_invalid(self): +class Test___setitem__: + def test_key_invalid(self, lenient): emsg = "Invalid .* option, got 'MATHS." - with self.assertRaisesRegex(KeyError, emsg): - self.lenient["MATHS"] = False + with pytest.raises(KeyError, match=emsg): + lenient["MATHS"] = False - def test_maths_value_invalid(self): - value = sentinel.value + def test_maths_value_invalid(self, mocker, lenient): + value = mocker.sentinel.value emsg = f"Invalid .* option 'maths' value, got {value!r}." 
- with self.assertRaisesRegex(ValueError, emsg): - self.lenient["maths"] = value + with pytest.raises(ValueError, match=emsg): + lenient["maths"] = value - def test_maths_disable__lenient_enable_true(self): - self.assertTrue(_LENIENT.enable) - self.lenient["maths"] = False - self.assertFalse(self.lenient.__dict__["maths"]) - self.assertFalse(_LENIENT.enable) + def test_maths_disable__lenient_enable_true(self, lenient): + assert bool(_LENIENT.enable) is True + lenient["maths"] = False + assert bool(lenient.__dict__["maths"]) is False + assert bool(_LENIENT.enable) is False - def test_maths_disable__lenient_enable_false(self): + def test_maths_disable__lenient_enable_false(self, lenient): _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.lenient["maths"] = False - self.assertFalse(self.lenient.__dict__["maths"]) - self.assertFalse(_LENIENT.enable) - - def test_maths_enable__lenient_enable_true(self): - self.assertTrue(_LENIENT.enable) - self.lenient["maths"] = True - self.assertTrue(self.lenient.__dict__["maths"]) - self.assertTrue(_LENIENT.enable) - - def test_maths_enable__lenient_enable_false(self): + assert bool(_LENIENT.enable) is False + lenient["maths"] = False + assert bool(lenient.__dict__["maths"]) is False + assert bool(_LENIENT.enable) is False + + def test_maths_enable__lenient_enable_true(self, lenient): + assert bool(_LENIENT.enable) is True + lenient["maths"] = True + assert bool(lenient.__dict__["maths"]) is True + assert bool(_LENIENT.enable) is True + + def test_maths_enable__lenient_enable_false(self, lenient): _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.lenient["maths"] = True - self.assertTrue(self.lenient.__dict__["maths"]) - self.assertTrue(_LENIENT.enable) - + assert bool(_LENIENT.enable) is False + lenient["maths"] = True + assert bool(lenient.__dict__["maths"]) is True + assert bool(_LENIENT.enable) is True -class Test_context(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - def test_nop(self): - self.assertTrue(self.lenient["maths"]) +class Test_context: + def test_nop(self, lenient): + assert bool(lenient["maths"]) is True - with self.lenient.context(): - self.assertTrue(self.lenient["maths"]) + with lenient.context(): + assert bool(lenient["maths"]) is True - self.assertTrue(self.lenient["maths"]) + assert bool(lenient["maths"]) is True - def test_maths_disable__lenient_true(self): + def test_maths_disable__lenient_true(self, lenient): # synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) + assert bool(_LENIENT.enable) is True + assert bool(lenient["maths"]) is True - with self.lenient.context(maths=False): + with lenient.context(maths=False): # still synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) + assert bool(_LENIENT.enable) is False + assert bool(lenient["maths"]) is False # still synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) + assert bool(_LENIENT.enable) is True + assert bool(lenient["maths"]) is True - def test_maths_disable__lenient_false(self): + def test_maths_disable__lenient_false(self, lenient): # not synchronised _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) + assert bool(_LENIENT.enable) is False + assert bool(lenient["maths"]) is True - with self.lenient.context(maths=False): + with lenient.context(maths=False): # now synchronised - self.assertFalse(_LENIENT.enable) - 
self.assertFalse(self.lenient["maths"]) + assert bool(_LENIENT.enable) is False + assert bool(lenient["maths"]) is False # still synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) + assert bool(_LENIENT.enable) is True + assert bool(lenient["maths"]) is True - def test_maths_enable__lenient_true(self): + def test_maths_enable__lenient_true(self, lenient): # not synchronised - self.assertTrue(_LENIENT.enable) - self.lenient.__dict__["maths"] = False - self.assertFalse(self.lenient["maths"]) + assert bool(_LENIENT.enable) is True + lenient.__dict__["maths"] = False + assert bool(lenient["maths"]) is False - with self.lenient.context(maths=True): + with lenient.context(maths=True): # now synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) + assert bool(_LENIENT.enable) is True + assert bool(lenient["maths"]) is True # still synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) + assert bool(_LENIENT.enable) is False + assert bool(lenient["maths"]) is False - def test_maths_enable__lenient_false(self): + def test_maths_enable__lenient_false(self, lenient): # synchronised _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.lenient.__dict__["maths"] = False - self.assertFalse(self.lenient["maths"]) + assert bool(_LENIENT.enable) is False + lenient.__dict__["maths"] = False + assert bool(lenient["maths"]) is False - with self.lenient.context(maths=True): + with lenient.context(maths=True): # still synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) + assert bool(_LENIENT.enable) is True + assert bool(lenient["maths"]) is True # still synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) - - -if __name__ == "__main__": - tests.main() + assert bool(_LENIENT.enable) is False + assert bool(lenient["maths"]) is False diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index 814359fbaf..bd19c3922e 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -4,12 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.lenient._Lenient`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from collections.abc import Iterable +import pytest + from iris.common.lenient import ( _LENIENT_ENABLE_DEFAULT, _LENIENT_PROTECTED, @@ -18,25 +16,30 @@ ) -class Test___init__(tests.IrisTest): - def setUp(self): +@pytest.fixture() +def lenient(): + return _Lenient() + + +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self): self.expected = dict(active=None, enable=_LENIENT_ENABLE_DEFAULT) - def test_default(self): - lenient = _Lenient() - self.assertEqual(self.expected, lenient.__dict__) + def test_default(self, lenient): + assert lenient.__dict__ == self.expected def test_args_service_str(self): service = "service1" lenient = _Lenient(service) self.expected.update(dict(service1=True)) - self.assertEqual(self.expected, lenient.__dict__) + assert lenient.__dict__ == self.expected def test_args_services_str(self): services = ("service1", "service2") lenient = _Lenient(*services) self.expected.update(dict(service1=True, service2=True)) - self.assertEqual(self.expected, lenient.__dict__) + assert lenient.__dict__ == self.expected def test_args_services_callable(self): def service1(): @@ -48,19 +51,19 @@ def service2(): services = (service1, service2) lenient = _Lenient(*services) self.expected.update({_qualname(service1): True, _qualname(service2): True}) - self.assertEqual(self.expected, lenient.__dict__) + assert lenient.__dict__ == self.expected def test_kwargs_client_str(self): client = dict(client1="service1") lenient = _Lenient(**client) self.expected.update(dict(client1=("service1",))) - self.assertEqual(self.expected, lenient.__dict__) + assert lenient.__dict__ == self.expected def test_kwargs_clients_str(self): clients = dict(client1="service1", client2="service2") lenient = _Lenient(**clients) self.expected.update(dict(client1=("service1",), client2=("service2",))) - self.assertEqual(self.expected, lenient.__dict__) + assert lenient.__dict__ == self.expected def test_kwargs_clients_callable(self): def client1(): @@ -88,27 +91,28 @@ def service2(): _qualname(client2): (_qualname(service1), _qualname(service2)), } ) - self.assertEqual(self.expected, lenient.__dict__) + assert lenient.__dict__ == self.expected -class Test___call__(tests.IrisTest): - def setUp(self): +class Test___call__: + @pytest.fixture(autouse=True) + def _setup(self): self.client = "myclient" self.lenient = _Lenient() def test_missing_service_str(self): - self.assertFalse(self.lenient("myservice")) + assert not self.lenient("myservice") def test_missing_service_callable(self): def myservice(): pass - self.assertFalse(self.lenient(myservice)) + assert not self.lenient(myservice) def test_disabled_service_str(self): service = "myservice" self.lenient.__dict__[service] = False - self.assertFalse(self.lenient(service)) + assert not self.lenient(service) def test_disable_service_callable(self): def myservice(): @@ -116,12 +120,12 @@ def myservice(): qualname_service = _qualname(myservice) self.lenient.__dict__[qualname_service] = False - self.assertFalse(self.lenient(myservice)) + assert not self.lenient(myservice) def test_service_str_with_no_active_client(self): service = "myservice" self.lenient.__dict__[service] = True - self.assertFalse(self.lenient(service)) + assert not self.lenient(service) def test_service_callable_with_no_active_client(self): def myservice(): @@ -129,13 +133,13 @@ def myservice(): qualname_service = _qualname(myservice) self.lenient.__dict__[qualname_service] = True - self.assertFalse(self.lenient(myservice)) + assert not 
self.lenient(myservice) def test_service_str_with_active_client_with_no_registered_services(self): service = "myservice" self.lenient.__dict__[service] = True self.lenient.__dict__["active"] = self.client - self.assertFalse(self.lenient(service)) + assert not self.lenient(service) def test_service_callable_with_active_client_with_no_registered_services( self, @@ -149,16 +153,14 @@ def myclient(): qualname_service = _qualname(myservice) self.lenient.__dict__[qualname_service] = True self.lenient.__dict__["active"] = _qualname(myclient) - self.assertFalse(self.lenient(myservice)) + assert not self.lenient(myservice) - def test_service_str_with_active_client_with_unmatched_registered_services( - self, - ): + def test_service_str_with_active_client_with_unmatched_registered_services(self): service = "myservice" self.lenient.__dict__[service] = True self.lenient.__dict__["active"] = self.client self.lenient.__dict__[self.client] = ("service1", "service2") - self.assertFalse(self.lenient(service)) + assert not self.lenient(service) def test_service_callable_with_active_client_with_unmatched_registered_services( self, @@ -174,18 +176,16 @@ def myclient(): self.lenient.__dict__[qualname_service] = True self.lenient.__dict__["active"] = qualname_client self.lenient.__dict__[qualname_client] = ("service1", "service2") - self.assertFalse(self.lenient(myservice)) + assert not self.lenient(myservice) def test_service_str_with_active_client_with_registered_services(self): service = "myservice" self.lenient.__dict__[service] = True self.lenient.__dict__["active"] = self.client self.lenient.__dict__[self.client] = ("service1", "service2", service) - self.assertTrue(self.lenient(service)) + assert self.lenient(service) - def test_service_callable_with_active_client_with_registered_services( - self, - ): + def test_service_callable_with_active_client_with_registered_services(self): def myservice(): pass @@ -201,7 +201,7 @@ def myclient(): "service2", qualname_service, ) - self.assertTrue(self.lenient(myservice)) + assert self.lenient(myservice) def test_service_str_with_active_client_with_unmatched_registered_service_str( self, @@ -210,7 +210,7 @@ def test_service_str_with_active_client_with_unmatched_registered_service_str( self.lenient.__dict__[service] = True self.lenient.__dict__["active"] = self.client self.lenient.__dict__[self.client] = "serviceXXX" - self.assertFalse(self.lenient(service)) + assert not self.lenient(service) def test_service_callable_with_active_client_with_unmatched_registered_service_str( self, @@ -226,14 +226,14 @@ def myclient(): self.lenient.__dict__[qualname_service] = True self.lenient.__dict__["active"] = qualname_client self.lenient.__dict__[qualname_client] = f"{qualname_service}XXX" - self.assertFalse(self.lenient(myservice)) + assert not self.lenient(myservice) def test_service_str_with_active_client_with_registered_service_str(self): service = "myservice" self.lenient.__dict__[service] = True self.lenient.__dict__["active"] = self.client self.lenient.__dict__[self.client] = service - self.assertTrue(self.lenient(service)) + assert self.lenient(service) def test_service_callable_with_active_client_with_registered_service_str( self, @@ -249,119 +249,106 @@ def myclient(): self.lenient.__dict__[qualname_service] = True self.lenient.__dict__["active"] = qualname_client self.lenient.__dict__[qualname_client] = qualname_service - self.assertTrue(self.lenient(myservice)) + assert self.lenient(myservice) def test_enable(self): service = "myservice" self.lenient.__dict__[service] 
= True self.lenient.__dict__["active"] = self.client self.lenient.__dict__[self.client] = service - self.assertTrue(self.lenient(service)) + assert self.lenient(service) self.lenient.__dict__["enable"] = False - self.assertFalse(self.lenient(service)) + assert not self.lenient(service) -class Test___contains__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() +class Test___contains__: + def test_in(self, lenient): + assert "active" in lenient - def test_in(self): - self.assertIn("active", self.lenient) + def test_not_in(self, lenient): + assert "ACTIVATE" not in lenient - def test_not_in(self): - self.assertNotIn("ACTIVATE", self.lenient) - - def test_in_qualname(self): + def test_in_qualname(self, lenient): def func(): pass qualname_func = _qualname(func) - lenient = _Lenient() lenient.__dict__[qualname_func] = None - self.assertIn(func, lenient) - self.assertIn(qualname_func, lenient) - + assert func in lenient + assert qualname_func in lenient -class Test___getattr__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - def test_in(self): - self.assertIsNone(self.lenient.active) +class Test___getattr__: + def test_in(self, lenient): + assert lenient.active is None - def test_not_in(self): + def test_not_in(self, lenient): emsg = "Invalid .* option, got 'wibble'." - with self.assertRaisesRegex(AttributeError, emsg): - _ = self.lenient.wibble - + with pytest.raises(AttributeError, match=emsg): + _ = lenient.wibble -class Test__getitem__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - def test_in(self): - self.assertIsNone(self.lenient["active"]) +class Test__getitem__: + def test_in(self, lenient): + assert lenient["active"] is None - def test_in_callable(self): + def test_in_callable(self, lenient): def service(): pass qualname_service = _qualname(service) - self.lenient.__dict__[qualname_service] = True - self.assertTrue(self.lenient[service]) + lenient.__dict__[qualname_service] = True + assert lenient[service] - def test_not_in(self): + def test_not_in(self, lenient): emsg = "Invalid .* option, got 'wibble'." - with self.assertRaisesRegex(KeyError, emsg): - _ = self.lenient["wibble"] + with pytest.raises(KeyError, match=emsg): + _ = lenient["wibble"] - def test_not_in_callable(self): + def test_not_in_callable(self, lenient): def service(): pass qualname_service = _qualname(service) emsg = f"Invalid .* option, got '{qualname_service}'." - with self.assertRaisesRegex(KeyError, emsg): - _ = self.lenient[service] + with pytest.raises(KeyError, match=emsg): + _ = lenient[service] -class Test___setitem__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_in(self): +class Test___setitem__: + def test_not_in(self, lenient): emsg = "Invalid .* option, got 'wibble'." 
- with self.assertRaisesRegex(KeyError, emsg): - self.lenient["wibble"] = None + with pytest.raises(KeyError, match=emsg): + lenient["wibble"] = None - def test_in_value_str(self): + def test_in_value_str(self, lenient): client = "client" service = "service" - self.lenient.__dict__[client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[client], (service,)) + lenient.__dict__[client] = None + lenient[client] = service + assert lenient.__dict__[client] == (service,) - def test_callable_in_value_str(self): + def test_callable_in_value_str(self, lenient): def client(): pass service = "service" qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[qualname_client], (service,)) + lenient.__dict__[qualname_client] = None + lenient[client] = service + assert lenient.__dict__[qualname_client] == (service,) - def test_in_value_callable(self): + def test_in_value_callable(self, lenient): def service(): pass client = "client" qualname_service = _qualname(service) - self.lenient.__dict__[client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[client], (qualname_service,)) + lenient.__dict__[client] = None + lenient[client] = service + assert lenient.__dict__[client] == (qualname_service,) - def test_callable_in_value_callable(self): + def test_callable_in_value_callable(self, lenient): def client(): pass @@ -370,45 +357,45 @@ def service(): qualname_client = _qualname(client) qualname_service = _qualname(service) - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[qualname_client], (qualname_service,)) + lenient.__dict__[qualname_client] = None + lenient[client] = service + assert lenient.__dict__[qualname_client] == (qualname_service,) - def test_in_value_bool(self): + def test_in_value_bool(self, lenient): client = "client" - self.lenient.__dict__[client] = None - self.lenient[client] = True - self.assertTrue(self.lenient.__dict__[client]) - self.assertFalse(isinstance(self.lenient.__dict__[client], Iterable)) + lenient.__dict__[client] = None + lenient[client] = True + assert lenient.__dict__[client] + assert not isinstance(lenient.__dict__[client], Iterable) - def test_callable_in_value_bool(self): + def test_callable_in_value_bool(self, lenient): def client(): pass qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = True - self.assertTrue(self.lenient.__dict__[qualname_client]) - self.assertFalse(isinstance(self.lenient.__dict__[qualname_client], Iterable)) + lenient.__dict__[qualname_client] = None + lenient[client] = True + assert lenient.__dict__[qualname_client] + assert not isinstance(lenient.__dict__[qualname_client], Iterable) - def test_in_value_iterable(self): + def test_in_value_iterable(self, lenient): client = "client" services = ("service1", "service2") - self.lenient.__dict__[client] = None - self.lenient[client] = services - self.assertEqual(self.lenient.__dict__[client], services) + lenient.__dict__[client] = None + lenient[client] = services + assert lenient.__dict__[client] == services - def test_callable_in_value_iterable(self): + def test_callable_in_value_iterable(self, lenient): def client(): pass qualname_client = _qualname(client) services = ("service1", "service2") - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = services - 
self.assertEqual(self.lenient.__dict__[qualname_client], services) + lenient.__dict__[qualname_client] = None + lenient[client] = services + assert lenient.__dict__[qualname_client] == services - def test_in_value_iterable_callable(self): + def test_in_value_iterable_callable(self, lenient): def service1(): pass @@ -416,12 +403,12 @@ def service2(): pass client = "client" - self.lenient.__dict__[client] = None + lenient.__dict__[client] = None qualname_services = (_qualname(service1), _qualname(service2)) - self.lenient[client] = (service1, service2) - self.assertEqual(self.lenient.__dict__[client], qualname_services) + lenient[client] = (service1, service2) + assert lenient.__dict__[client] == qualname_services - def test_callable_in_value_iterable_callable(self): + def test_callable_in_value_iterable_callable(self, lenient): def client(): pass @@ -432,51 +419,53 @@ def service2(): pass qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = None + lenient.__dict__[qualname_client] = None qualname_services = (_qualname(service1), _qualname(service2)) - self.lenient[client] = (service1, service2) - self.assertEqual(self.lenient.__dict__[qualname_client], qualname_services) + lenient[client] = (service1, service2) + assert lenient.__dict__[qualname_client] == qualname_services - def test_active_iterable(self): + def test_active_iterable(self, lenient): active = "active" - self.assertIsNone(self.lenient.__dict__[active]) + assert lenient.__dict__[active] is None + emsg = "Invalid .* option 'active'" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient[active] = (None,) + with pytest.raises(ValueError, match=emsg): + lenient[active] = (None,) - def test_active_str(self): + def test_active_str(self, lenient): active = "active" client = "client1" - self.assertIsNone(self.lenient.__dict__[active]) - self.lenient[active] = client - self.assertEqual(self.lenient.__dict__[active], client) + assert lenient.__dict__[active] is None + lenient[active] = client + assert lenient.__dict__[active] == client - def test_active_callable(self): + def test_active_callable(self, lenient): def client(): pass active = "active" qualname_client = _qualname(client) - self.assertIsNone(self.lenient.__dict__[active]) - self.lenient[active] = client - self.assertEqual(self.lenient.__dict__[active], qualname_client) + assert lenient.__dict__[active] is None + lenient[active] = client + assert lenient.__dict__[active] == qualname_client - def test_enable(self): + def test_enable(self, lenient): enable = "enable" - self.assertEqual(self.lenient.__dict__[enable], _LENIENT_ENABLE_DEFAULT) - self.lenient[enable] = True - self.assertTrue(self.lenient.__dict__[enable]) - self.lenient[enable] = False - self.assertFalse(self.lenient.__dict__[enable]) + assert lenient.__dict__[enable] == _LENIENT_ENABLE_DEFAULT + lenient[enable] = True + assert lenient.__dict__[enable] + lenient[enable] = False + assert not lenient.__dict__[enable] - def test_enable_invalid(self): + def test_enable_invalid(self, lenient): emsg = "Invalid .* option 'enable'" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient["enable"] = None + with pytest.raises(ValueError, match=emsg): + lenient["enable"] = None -class Test_context(tests.IrisTest): - def setUp(self): +class Test_context: + @pytest.fixture(autouse=True) + def _setup(self): self.lenient = _Lenient() self.default = dict(active=None, enable=_LENIENT_ENABLE_DEFAULT) @@ -488,9 +477,9 @@ def test_nop(self): with self.lenient.context(): context = self.copy() 
post = self.copy() - self.assertEqual(pre, self.default) - self.assertEqual(context, self.default) - self.assertEqual(post, self.default) + assert pre == self.default + assert context == self.default + assert post == self.default def test_active_str(self): client = "client" @@ -498,11 +487,11 @@ def test_active_str(self): with self.lenient.context(active=client): context = self.copy() post = self.copy() - self.assertEqual(pre, self.default) + assert pre == self.default expected = self.default.copy() expected.update(dict(active=client)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) + assert context == expected + assert post == self.default def test_active_callable(self): def client(): @@ -513,11 +502,11 @@ def client(): context = self.copy() post = self.copy() qualname_client = _qualname(client) - self.assertEqual(pre, self.default) + assert pre == self.default expected = self.default.copy() expected.update(dict(active=qualname_client)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) + assert context == expected + assert post == self.default def test_kwargs(self): client = "client" @@ -528,11 +517,11 @@ def test_kwargs(self): context = self.copy() post = self.copy() self.default.update(dict(service1=False, service2=False)) - self.assertEqual(pre, self.default) + assert pre == self.default expected = self.default.copy() expected.update(dict(active=client, service1=True, service2=True)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) + assert context == expected + assert post == self.default def test_args_str(self): client = "client" @@ -541,12 +530,12 @@ def test_args_str(self): with self.lenient.context(*services, active=client): context = self.copy() post = self.copy() - self.assertEqual(pre, self.default) + assert pre == self.default expected = self.default.copy() expected.update(dict(active=client, client=services)) - self.assertEqual(context["active"], expected["active"]) - self.assertEqual(set(context["client"]), set(expected["client"])) - self.assertEqual(post, self.default) + assert context["active"] == expected["active"] + assert set(context["client"]) == set(expected["client"]) + assert post == self.default def test_args_callable(self): def service1(): @@ -562,12 +551,12 @@ def service2(): context = self.copy() post = self.copy() qualname_services = tuple([_qualname(service) for service in services]) - self.assertEqual(pre, self.default) + assert pre == self.default expected = self.default.copy() expected.update(dict(active=client, client=qualname_services)) - self.assertEqual(context["active"], expected["active"]) - self.assertEqual(set(context["client"]), set(expected["client"])) - self.assertEqual(post, self.default) + assert context["active"] == expected["active"] + assert set(context["client"]) == set(expected["client"]) + assert post == self.default def test_context_runtime(self): services = ("service1", "service2") @@ -575,56 +564,54 @@ def test_context_runtime(self): with self.lenient.context(*services): context = self.copy() post = self.copy() - self.assertEqual(pre, self.default) + assert pre == self.default expected = self.default.copy() expected.update(dict(active="__context", __context=services)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) + assert context == expected + assert post == self.default -class Test_enable(tests.IrisTest): - def setUp(self): +class Test_enable: + @pytest.fixture(autouse=True) + def _setup(self): self.lenient = _Lenient() 
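# Sketch of the "lenient" fixture that the converted test classes above request
# by argument. Its real definition sits outside this excerpt; a minimal version
# consistent with how the tests use it (a fresh registry per test) would be:
import pytest

from iris.common.lenient import _Lenient


@pytest.fixture
def lenient():
    # Provide a clean, unmodified _Lenient instance to each test function.
    return _Lenient()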
def test_getter(self): - self.assertEqual(self.lenient.enable, _LENIENT_ENABLE_DEFAULT) + assert self.lenient.enable == _LENIENT_ENABLE_DEFAULT def test_setter_invalid(self): emsg = "Invalid .* option 'enable'" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.lenient.enable = 0 def test_setter(self): - self.assertEqual(self.lenient.enable, _LENIENT_ENABLE_DEFAULT) + assert self.lenient.enable == _LENIENT_ENABLE_DEFAULT self.lenient.enable = False - self.assertFalse(self.lenient.enable) + assert not self.lenient.enable -class Test_register_client(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_protected(self): +class Test_register_client: + def test_not_protected(self, lenient): emsg = "Cannot register .* client" for protected in _LENIENT_PROTECTED: - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.register_client(protected, "service") + with pytest.raises(ValueError, match=emsg): + lenient.register_client(protected, "service") - def test_str_service_str(self): + def test_str_service_str(self, lenient): client = "client" services = "service" - self.lenient.register_client(client, services) - self.assertIn(client, self.lenient.__dict__) - self.assertEqual(self.lenient.__dict__[client], (services,)) + lenient.register_client(client, services) + assert client in lenient.__dict__ + assert lenient.__dict__[client] == (services,) - def test_str_services_str(self): + def test_str_services_str(self, lenient): client = "client" services = ("service1", "service2") - self.lenient.register_client(client, services) - self.assertIn(client, self.lenient.__dict__) - self.assertEqual(self.lenient.__dict__[client], services) + lenient.register_client(client, services) + assert client in lenient.__dict__ + assert lenient.__dict__[client] == services - def test_callable_service_callable(self): + def test_callable_service_callable(self, lenient): def client(): pass @@ -633,11 +620,11 @@ def service(): qualname_client = _qualname(client) qualname_service = _qualname(service) - self.lenient.register_client(client, service) - self.assertIn(qualname_client, self.lenient.__dict__) - self.assertEqual(self.lenient.__dict__[qualname_client], (qualname_service,)) + lenient.register_client(client, service) + assert qualname_client in lenient.__dict__ + assert lenient.__dict__[qualname_client] == (qualname_service,) - def test_callable_services_callable(self): + def test_callable_services_callable(self, lenient): def client(): pass @@ -649,163 +636,150 @@ def service2(): qualname_client = _qualname(client) qualname_services = (_qualname(service1), _qualname(service2)) - self.lenient.register_client(client, (service1, service2)) - self.assertIn(qualname_client, self.lenient.__dict__) - self.assertEqual(self.lenient.__dict__[qualname_client], qualname_services) + lenient.register_client(client, (service1, service2)) + assert qualname_client in lenient.__dict__ + assert lenient.__dict__[qualname_client] == qualname_services - def test_services_empty(self): + def test_services_empty(self, lenient): emsg = "Require at least one .* client service." 
- with self.assertRaisesRegex(ValueError, emsg): - self.lenient.register_client("client", ()) + with pytest.raises(ValueError, match=emsg): + lenient.register_client("client", ()) - def test_services_overwrite(self): + def test_services_overwrite(self, lenient): client = "client" services = ("service1", "service2") - self.lenient.__dict__[client] = services - self.assertEqual(self.lenient[client], services) + lenient.__dict__[client] = services + assert lenient[client] == services new_services = ("service3", "service4") - self.lenient.register_client(client, services=new_services) - self.assertEqual(self.lenient[client], new_services) + lenient.register_client(client, services=new_services) + assert lenient[client] == new_services - def test_services_append(self): + def test_services_append(self, lenient): client = "client" services = ("service1", "service2") - self.lenient.__dict__[client] = services - self.assertEqual(self.lenient[client], services) + lenient.__dict__[client] = services + assert lenient[client] == services new_services = ("service3", "service4") - self.lenient.register_client(client, services=new_services, append=True) + lenient.register_client(client, services=new_services, append=True) expected = set(services + new_services) - self.assertEqual(set(self.lenient[client]), expected) + assert set(lenient[client]) == expected -class Test_register_service(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_str(self): +class Test_register_service: + def test_str(self, lenient): service = "service" - self.assertNotIn(service, self.lenient.__dict__) - self.lenient.register_service(service) - self.assertIn(service, self.lenient.__dict__) - self.assertFalse(isinstance(self.lenient.__dict__[service], Iterable)) - self.assertTrue(self.lenient.__dict__[service]) + assert service not in lenient.__dict__ + lenient.register_service(service) + assert service in lenient.__dict__ + assert not isinstance(lenient.__dict__[service], Iterable) + assert lenient.__dict__[service] - def test_callable(self): + def test_callable(self, lenient): def service(): pass qualname_service = _qualname(service) - self.assertNotIn(qualname_service, self.lenient.__dict__) - self.lenient.register_service(service) - self.assertIn(qualname_service, self.lenient.__dict__) - self.assertFalse(isinstance(self.lenient.__dict__[qualname_service], Iterable)) - self.assertTrue(self.lenient.__dict__[qualname_service]) + assert qualname_service not in lenient.__dict__ + lenient.register_service(service) + assert qualname_service in lenient.__dict__ + assert not isinstance(lenient.__dict__[qualname_service], Iterable) + assert lenient.__dict__[qualname_service] - def test_not_protected(self): + def test_not_protected(self, lenient): emsg = "Cannot register .* service" for protected in _LENIENT_PROTECTED: - self.lenient.__dict__[protected] = None - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.register_service("active") + lenient.__dict__[protected] = None + with pytest.raises(ValueError, match=emsg): + lenient.register_service("active") -class Test_unregister_client(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_protected(self): +class Test_unregister_client: + def test_not_protected(self, lenient): emsg = "Cannot unregister .* client, as .* is a protected .* option." 
for protected in _LENIENT_PROTECTED: - self.lenient.__dict__[protected] = None - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client(protected) + lenient.__dict__[protected] = None + with pytest.raises(ValueError, match=emsg): + lenient.unregister_client(protected) - def test_not_in(self): + def test_not_in(self, lenient): emsg = "Cannot unregister unknown .* client" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client("client") + with pytest.raises(ValueError, match=emsg): + lenient.unregister_client("client") - def test_not_client(self): + def test_not_client(self, lenient): client = "client" - self.lenient.__dict__[client] = True + lenient.__dict__[client] = True emsg = "Cannot unregister .* client, as .* is not a valid .* client." - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client(client) + with pytest.raises(ValueError, match=emsg): + lenient.unregister_client(client) - def test_not_client_callable(self): + def test_not_client_callable(self, lenient): def client(): pass qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = True + lenient.__dict__[qualname_client] = True emsg = "Cannot unregister .* client, as .* is not a valid .* client." - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client(client) + with pytest.raises(ValueError, match=emsg): + lenient.unregister_client(client) - def test_str(self): + def test_str(self, lenient): client = "client" - self.lenient.__dict__[client] = (None,) - self.lenient.unregister_client(client) - self.assertNotIn(client, self.lenient.__dict__) + lenient.__dict__[client] = (None,) + lenient.unregister_client(client) + assert client not in lenient.__dict__ - def test_callable(self): + def test_callable(self, lenient): def client(): pass qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = (None,) - self.lenient.unregister_client(client) - self.assertNotIn(qualname_client, self.lenient.__dict__) + lenient.__dict__[qualname_client] = (None,) + lenient.unregister_client(client) + assert qualname_client not in lenient.__dict__ -class Test_unregister_service(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_protected(self): +class Test_unregister_service: + def test_not_protected(self, lenient): emsg = "Cannot unregister .* service, as .* is a protected .* option." for protected in _LENIENT_PROTECTED: - self.lenient.__dict__[protected] = None - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service(protected) + lenient.__dict__[protected] = None + with pytest.raises(ValueError, match=emsg): + lenient.unregister_service(protected) - def test_not_in(self): + def test_not_in(self, lenient): emsg = "Cannot unregister unknown .* service" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service("service") + with pytest.raises(ValueError, match=emsg): + lenient.unregister_service("service") - def test_not_service(self): + def test_not_service(self, lenient): service = "service" - self.lenient.__dict__[service] = (None,) + lenient.__dict__[service] = (None,) emsg = "Cannot unregister .* service, as .* is not a valid .* service." 
- with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service(service) + with pytest.raises(ValueError, match=emsg): + lenient.unregister_service(service) - def test_not_service_callable(self): + def test_not_service_callable(self, lenient): def service(): pass qualname_service = _qualname(service) - self.lenient.__dict__[qualname_service] = (None,) + lenient.__dict__[qualname_service] = (None,) emsg = "Cannot unregister .* service, as .* is not a valid .* service." - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service(service) + with pytest.raises(ValueError, match=emsg): + lenient.unregister_service(service) - def test_str(self): + def test_str(self, lenient): service = "service" - self.lenient.__dict__[service] = True - self.lenient.unregister_service(service) - self.assertNotIn(service, self.lenient.__dict__) + lenient.__dict__[service] = True + lenient.unregister_service(service) + assert service not in lenient.__dict__ - def test_callable(self): + def test_callable(self, lenient): def service(): pass qualname_service = _qualname(service) - self.lenient.__dict__[qualname_service] = True - self.lenient.unregister_service(service) - self.assertNotIn(qualname_service, self.lenient.__dict__) - - -if __name__ == "__main__": - tests.main() + lenient.__dict__[qualname_service] = True + lenient.unregister_service(service) + assert qualname_service not in lenient.__dict__ diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py index 509b183003..01e1853007 100644 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ b/lib/iris/tests/unit/common/lenient/test__lenient_client.py @@ -4,42 +4,42 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.common.lenient._lenient_client`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests  # isort:skip
-
 from inspect import getmodule
-from unittest.mock import sentinel
+
+import pytest
 
 from iris.common.lenient import _LENIENT, _lenient_client
 
 
-class Test(tests.IrisTest):
-    def setUp(self):
+class Test:
+    @pytest.fixture(autouse=True)
+    def _setup(self, mocker):
         module_name = getmodule(self).__name__
         self.client = f"{module_name}" + ".Test.{}.<locals>.myclient"
         self.service = f"{module_name}" + ".Test.{}.<locals>.myservice"
         self.active = "active"
-        self.args_in = sentinel.arg1, sentinel.arg2
-        self.kwargs_in = dict(kwarg1=sentinel.kwarg1, kwarg2=sentinel.kwarg2)
+        self.args_in = mocker.sentinel.arg1, mocker.sentinel.arg2
+        self.kwargs_in = dict(
+            kwarg1=mocker.sentinel.kwarg1, kwarg2=mocker.sentinel.kwarg2
+        )
 
     def test_args_too_many(self):
         emsg = "Invalid lenient client arguments, expecting 1"
-        with self.assertRaisesRegex(AssertionError, emsg):
-            _lenient_client(None, None)
+        with pytest.raises(AssertionError, match=emsg):
+            _ = _lenient_client(None, None)
 
     def test_args_not_callable(self):
         emsg = "Invalid lenient client argument, expecting a callable"
-        with self.assertRaisesRegex(AssertionError, emsg):
-            _lenient_client(None)
+        with pytest.raises(AssertionError, match=emsg):
+            _ = _lenient_client(None)
 
     def test_args_and_kwargs(self):
         def func():
             pass
 
         emsg = "Invalid lenient client, got both arguments and keyword arguments"
-        with self.assertRaisesRegex(AssertionError, emsg):
-            _lenient_client(func, services=func)
+        with pytest.raises(AssertionError, match=emsg):
+            _ = _lenient_client(func, services=func)
 
     def test_call_naked(self):
         @_lenient_client
@@ -47,20 +47,20 @@ def myclient():
             return _LENIENT.__dict__.copy()
 
         result = myclient()
-        self.assertIn(self.active, result)
+        assert self.active in result
         qualname_client = self.client.format("test_call_naked")
-        self.assertEqual(result[self.active], qualname_client)
-        self.assertNotIn(qualname_client, result)
+        assert result[self.active] == qualname_client
+        assert qualname_client not in result
 
     def test_call_naked_alternative(self):
         def myclient():
             return _LENIENT.__dict__.copy()
 
         result = _lenient_client(myclient)()
-        self.assertIn(self.active, result)
+        assert self.active in result
         qualname_client = self.client.format("test_call_naked_alternative")
-        self.assertEqual(result[self.active], qualname_client)
-        self.assertNotIn(qualname_client, result)
+        assert result[self.active] == qualname_client
+        assert qualname_client not in result
 
     def test_call_naked_client_args_kwargs(self):
         @_lenient_client
@@ -68,15 +68,15 @@ def myclient(*args, **kwargs):
             return args, kwargs
 
         args_out, kwargs_out = myclient(*self.args_in, **self.kwargs_in)
-        self.assertEqual(args_out, self.args_in)
-        self.assertEqual(kwargs_out, self.kwargs_in)
+        assert args_out == self.args_in
+        assert kwargs_out == self.kwargs_in
 
     def test_call_naked_doc(self):
         @_lenient_client
         def myclient():
             """Myclient doc-string."""
 
-        self.assertEqual(myclient.__doc__, "Myclient doc-string.")
+        assert myclient.__doc__ == "Myclient doc-string."
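# Illustration (a standalone sketch, not one of the patch hunks): the
# ".Test.{}.<locals>." templates above follow Python's qualified-name rule for
# functions defined inside another function or method, which is what
# _qualname reports for the decorated test helpers. For example:
def outer():
    def inner():
        pass

    return inner


print(outer().__qualname__)  # prints "outer.<locals>.inner"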
def test_call_no_kwargs(self): @_lenient_client() @@ -84,20 +84,20 @@ def myclient(): return _LENIENT.__dict__.copy() result = myclient() - self.assertIn(self.active, result) + assert self.active in result qualname_client = self.client.format("test_call_no_kwargs") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) + assert result[self.active] == qualname_client + assert qualname_client not in result def test_call_no_kwargs_alternative(self): def myclient(): return _LENIENT.__dict__.copy() result = (_lenient_client())(myclient)() - self.assertIn(self.active, result) + assert self.active in result qualname_client = self.client.format("test_call_no_kwargs_alternative") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) + assert result[self.active] == qualname_client + assert qualname_client not in result def test_call_kwargs_none(self): @_lenient_client(services=None) @@ -105,24 +105,24 @@ def myclient(): return _LENIENT.__dict__.copy() result = myclient() - self.assertIn(self.active, result) + assert self.active in result qualname_client = self.client.format("test_call_kwargs_none") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) + assert result[self.active] == qualname_client + assert qualname_client not in result - def test_call_kwargs_single(self): - service = sentinel.service + def test_call_kwargs_single(self, mocker): + service = mocker.sentinel.service @_lenient_client(services=service) def myclient(): return _LENIENT.__dict__.copy() result = myclient() - self.assertIn(self.active, result) + assert self.active in result qualname_client = self.client.format("test_call_kwargs_single") - self.assertEqual(result[self.active], qualname_client) - self.assertIn(qualname_client, result) - self.assertEqual(result[qualname_client], (service,)) + assert result[self.active] == qualname_client + assert qualname_client in result + assert result[qualname_client] == (service,) def test_call_kwargs_single_callable(self): def myservice(): @@ -134,26 +134,26 @@ def myclient(): test_name = "test_call_kwargs_single_callable" result = myclient() - self.assertIn(self.active, result) + assert self.active in result qualname_client = self.client.format(test_name) - self.assertEqual(result[self.active], qualname_client) - self.assertIn(qualname_client, result) + assert result[self.active] == qualname_client + assert qualname_client in result qualname_services = (self.service.format(test_name),) - self.assertEqual(result[qualname_client], qualname_services) + assert result[qualname_client] == qualname_services - def test_call_kwargs_iterable(self): - services = (sentinel.service1, sentinel.service2) + def test_call_kwargs_iterable(self, mocker): + services = (mocker.sentinel.service1, mocker.sentinel.service2) @_lenient_client(services=services) def myclient(): return _LENIENT.__dict__.copy() result = myclient() - self.assertIn(self.active, result) + assert self.active in result qualname_client = self.client.format("test_call_kwargs_iterable") - self.assertEqual(result[self.active], qualname_client) - self.assertIn(qualname_client, result) - self.assertEqual(set(result[qualname_client]), set(services)) + assert result[self.active] == qualname_client + assert qualname_client in result + assert set(result[qualname_client]) == set(services) def test_call_client_args_kwargs(self): @_lenient_client() @@ -161,16 +161,12 @@ def myclient(*args, **kwargs): return args, 
kwargs
 
         args_out, kwargs_out = myclient(*self.args_in, **self.kwargs_in)
-        self.assertEqual(args_out, self.args_in)
-        self.assertEqual(kwargs_out, self.kwargs_in)
+        assert args_out == self.args_in
+        assert kwargs_out == self.kwargs_in
 
     def test_call_doc(self):
         @_lenient_client()
         def myclient():
             """Myclient doc-string."""
 
-        self.assertEqual(myclient.__doc__, "Myclient doc-string.")
-
-
-if __name__ == "__main__":
-    tests.main()
+        assert myclient.__doc__ == "Myclient doc-string."
diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py
index c0ed8df403..d89bbc977b 100644
--- a/lib/iris/tests/unit/common/lenient/test__lenient_service.py
+++ b/lib/iris/tests/unit/common/lenient/test__lenient_service.py
@@ -4,31 +4,31 @@
 # See LICENSE in the root of the repository for full licensing details.
 """Unit tests for the :func:`iris.common.lenient._lenient_service`."""
 
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests  # isort:skip
-
 from inspect import getmodule
-from unittest.mock import sentinel
+
+import pytest
 
 from iris.common.lenient import _LENIENT, _lenient_service
 
 
-class Test(tests.IrisTest):
-    def setUp(self):
+class Test:
+    @pytest.fixture(autouse=True)
+    def _setup(self, mocker):
         module_name = getmodule(self).__name__
         self.service = f"{module_name}" + ".Test.{}.<locals>.myservice"
-        self.args_in = sentinel.arg1, sentinel.arg2
-        self.kwargs_in = dict(kwarg1=sentinel.kwarg1, kwarg2=sentinel.kwarg2)
+        self.args_in = mocker.sentinel.arg1, mocker.sentinel.arg2
+        self.kwargs_in = dict(
+            kwarg1=mocker.sentinel.kwarg1, kwarg2=mocker.sentinel.kwarg2
+        )
 
     def test_args_too_many(self):
         emsg = "Invalid lenient service arguments, expecting 1"
-        with self.assertRaisesRegex(AssertionError, emsg):
+        with pytest.raises(AssertionError, match=emsg):
             _lenient_service(None, None)
 
     def test_args_not_callable(self):
         emsg = "Invalid lenient service argument, expecting a callable"
-        with self.assertRaisesRegex(AssertionError, emsg):
+        with pytest.raises(AssertionError, match=emsg):
             _lenient_service(None)
 
     def test_call_naked(self):
@@ -38,11 +38,11 @@ def myservice():
 
         qualname_service = self.service.format("test_call_naked")
         state = _LENIENT.__dict__
-        self.assertIn(qualname_service, state)
-        self.assertTrue(state[qualname_service])
+        assert qualname_service in state
+        assert state[qualname_service]
         result = myservice()
-        self.assertIn(qualname_service, result)
-        self.assertTrue(result[qualname_service])
+        assert qualname_service in result
+        assert result[qualname_service]
 
     def test_call_naked_alternative(self):
         def myservice():
@@ -50,8 +50,8 @@ def myservice():
 
         qualname_service = self.service.format("test_call_naked_alternative")
         result = _lenient_service(myservice)()
-        self.assertIn(qualname_service, result)
-        self.assertTrue(result[qualname_service])
+        assert qualname_service in result
+        assert result[qualname_service]
 
     def test_call_naked_service_args_kwargs(self):
         @_lenient_service
@@ -59,15 +59,15 @@ def myservice(*args, **kwargs):
             return args, kwargs
 
         args_out, kwargs_out = myservice(*self.args_in, **self.kwargs_in)
-        self.assertEqual(args_out, self.args_in)
-        self.assertEqual(kwargs_out, self.kwargs_in)
+        assert args_out == self.args_in
+        assert kwargs_out == self.kwargs_in
 
     def test_call_naked_doc(self):
         @_lenient_service
         def myservice():
             """Myservice doc-string."""
 
-        self.assertEqual(myservice.__doc__, "Myservice doc-string.")
+        assert myservice.__doc__ == "Myservice 
doc-string." def test_call(self): @_lenient_service() @@ -76,11 +76,11 @@ def myservice(): qualname_service = self.service.format("test_call") state = _LENIENT.__dict__ - self.assertIn(qualname_service, state) - self.assertTrue(state[qualname_service]) + assert qualname_service in state + assert state[qualname_service] result = myservice() - self.assertIn(qualname_service, result) - self.assertTrue(result[qualname_service]) + assert qualname_service in result + assert result[qualname_service] def test_call_alternative(self): def myservice(): @@ -88,8 +88,8 @@ def myservice(): qualname_service = self.service.format("test_call_alternative") result = (_lenient_service())(myservice)() - self.assertIn(qualname_service, result) - self.assertTrue(result[qualname_service]) + assert qualname_service in result + assert result[qualname_service] def test_call_service_args_kwargs(self): @_lenient_service() @@ -97,16 +97,12 @@ def myservice(*args, **kwargs): return args, kwargs args_out, kwargs_out = myservice(*self.args_in, **self.kwargs_in) - self.assertEqual(args_out, self.args_in) - self.assertEqual(kwargs_out, self.kwargs_in) + assert args_out == self.args_in + assert kwargs_out == self.kwargs_in def test_call_doc(self): @_lenient_service() def myservice(): """Myservice doc-string.""" - self.assertEqual(myservice.__doc__, "Myservice doc-string.") - - -if __name__ == "__main__": - tests.main() + assert myservice.__doc__ == "Myservice doc-string." diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py index 49576814d4..69d2d229e1 100644 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ b/lib/iris/tests/unit/common/lenient/test__qualname.py @@ -4,25 +4,23 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.common.lenient._qualname`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests  # isort:skip
-
 from inspect import getmodule
-from unittest.mock import sentinel
+
+import pytest
 
 from iris.common.lenient import _qualname
 
 
-class Test(tests.IrisTest):
-    def setUp(self):
+class Test:
+    @pytest.fixture(autouse=True)
+    def _setup(self):
         module_name = getmodule(self).__name__
         self.locals = f"{module_name}" + ".Test.{}.<locals>.{}"
 
-    def test_pass_thru_non_callable(self):
-        func = sentinel.func
+    def test_pass_thru_non_callable(self, mocker):
+        func = mocker.sentinel.func
         result = _qualname(func)
-        self.assertEqual(result, func)
+        assert result == func
 
     def test_callable_function_local(self):
         def myfunc():
@@ -30,13 +28,13 @@ def myfunc():
 
         qualname_func = self.locals.format("test_callable_function_local", "myfunc")
         result = _qualname(myfunc)
-        self.assertEqual(result, qualname_func)
+        assert result == qualname_func
 
     def test_callable_function(self):
         import iris
 
         result = _qualname(iris.load)
-        self.assertEqual(result, "iris.load")
+        assert result == "iris.load"
 
     def test_callable_method_local(self):
         class MyClass:
@@ -47,14 +45,10 @@ def mymethod(self):
             "test_callable_method_local", "MyClass.mymethod"
         )
         result = _qualname(MyClass.mymethod)
-        self.assertEqual(result, qualname_method)
+        assert result == qualname_method
 
     def test_callable_method(self):
         import iris
 
         result = _qualname(iris.cube.Cube.add_ancillary_variable)
-        self.assertEqual(result, "iris.cube.Cube.add_ancillary_variable")
-
-
-if __name__ == "__main__":
-    tests.main()
+        assert result == "iris.cube.Cube.add_ancillary_variable"
diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py
index b7304f4301..a12af28242 100644
--- a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py
+++ b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py
@@ -4,25 +4,22 @@
 # See LICENSE in the root of the repository for full licensing details.
 """Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`."""
 
-# Import iris.tests first so that some things can be initialised before
-# importing anything else.
-import iris.tests as tests # isort:skip - from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel + +import pytest from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import AncillaryVariableMetadata, BaseMetadata -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes self.cls = AncillaryVariableMetadata def test_repr(self): @@ -44,7 +41,7 @@ def test_repr(self): self.units, self.attributes, ) - self.assertEqual(expected, repr(metadata)) + assert repr(metadata) == expected def test__fields(self): expected = ( @@ -54,107 +51,108 @@ def test__fields(self): "units", "attributes", ) - self.assertEqual(self.cls._fields, expected) + assert self.cls._fields == expected def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) + assert issubclass(self.cls, BaseMetadata) -class Test___eq__(tests.IrisTest): - def setUp(self): +class Test___eq__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = AncillaryVariableMetadata def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + assert self.cls.__eq__.__doc__ == BaseMetadata.__eq__.__doc__ def test_lenient_service(self): qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) + assert qualname___eq__ in _LENIENT + assert _LENIENT[qualname___eq__] + assert _LENIENT[self.cls.__eq__] - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value + def test_call(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): + + patcher = mocker.patch.object(BaseMetadata, "__eq__", return_value=return_value) + result = metadata.__eq__(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == {} + + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - 
self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["units"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) -class Test___lt__(tests.IrisTest): - def setUp(self): +class Test___lt__: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = AncillaryVariableMetadata self.one = self.cls(1, 1, 1, 1, 1) self.two = self.cls(1, 1, 1, 2, 1) @@ -163,99 +161,100 @@ def setUp(self): def test__ascending_lt(self): result = self.one < self.two - self.assertTrue(result) + assert result def test__descending_lt(self): result = self.two < self.one - self.assertFalse(result) + assert not result def test__none_rhs_operand(self): result = self.one < self.none - self.assertFalse(result) + assert not result def test__none_lhs_operand(self): result = self.none < self.one - self.assertTrue(result) + assert result def test__ignore_attributes(self): result = self.one < self.attributes - 
self.assertFalse(result) + assert not result result = self.attributes < self.one - self.assertFalse(result) + assert not result -class Test_combine(tests.IrisTest): - def setUp(self): +class Test_combine: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = AncillaryVariableMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) + assert self.cls.combine.__doc__ == BaseMetadata.combine.__doc__ def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + assert qualname_combine in _LENIENT + assert _LENIENT[qualname_combine] + assert _LENIENT[self.cls.combine] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + ) + result = self.none.combine(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) + ) + result = self.none.combine(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) - def test_op_lenient_same(self): + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) expected = self.values - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def 
test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) expected = self.values - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["units"] = self.dummy @@ -263,20 +262,20 @@ def test_op_lenient_different(self): expected = self.values.copy() expected["units"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) expected = self.values.copy() - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = self.dummy @@ -284,11 +283,11 @@ def test_op_strict_different(self): expected = self.values.copy() expected["long_name"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = None @@ -296,81 +295,82 @@ def test_op_strict_different_none(self): expected = self.values.copy() expected["long_name"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected -class Test_difference(tests.IrisTest): - def setUp(self): +class Test_difference: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - 
units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = AncillaryVariableMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) + assert self.cls.difference.__doc__ == BaseMetadata.difference.__doc__ def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + assert qualname_difference in _LENIENT + assert _LENIENT[qualname_difference] + assert _LENIENT[self.cls.difference] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + ) + result = self.none.difference(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) + ) + result = self.none.difference(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) - def test_op_lenient_same(self): + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", 
return_value=True) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -381,19 +381,19 @@ def test_op_lenient_different(self): rexpected = deepcopy(self.none)._asdict() rexpected["units"] = lexpected["units"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -404,11 +404,11 @@ def test_op_strict_different(self): rexpected = deepcopy(self.none)._asdict() rexpected["long_name"] = lexpected["long_name"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -419,54 +419,47 @@ def test_op_strict_different_none(self): rexpected = deepcopy(self.none)._asdict() rexpected["long_name"] = lexpected["long_name"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected -class Test_equal(tests.IrisTest): - def setUp(self): +class Test_equal: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = AncillaryVariableMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + assert self.cls.equal.__doc__ == BaseMetadata.equal.__doc__ def test_lenient_service(self): qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", 
return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() + assert qualname_equal in _LENIENT + assert _LENIENT[qualname_equal] + assert _LENIENT[self.cls.equal] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other, lenient=lenient) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index 73886882de..1944ecd6be 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -4,28 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.metadata.BaseMetadata`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from collections import OrderedDict -import unittest.mock as mock -from unittest.mock import sentinel import numpy as np import numpy.ma as ma +import pytest from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import BaseMetadata, CubeMetadata +from iris.tests._shared_utils import assert_dict_equal -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes self.cls = BaseMetadata def test_repr(self): @@ -47,7 +44,7 @@ def test_repr(self): self.units, self.attributes, ) - self.assertEqual(expected, repr(metadata)) + assert repr(metadata) == expected def test_str(self): metadata = self.cls( @@ -58,7 +55,7 @@ def test_str(self): attributes={}, ) expected = f"BaseMetadata(var_name={self.var_name!r}, units={self.units!r})" - self.assertEqual(expected, str(metadata)) + assert str(metadata) == expected def test__fields(self): expected = ( @@ -68,70 +65,72 @@ def test__fields(self): "units", "attributes", ) - self.assertEqual(expected, self.cls._fields) + assert self.cls._fields == expected -class Test___eq__(tests.IrisTest): - def setUp(self): +class Test___eq__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.kwargs = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) self.cls = BaseMetadata self.metadata = self.cls(**self.kwargs) def test_lenient_service(self): qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) + assert qualname___eq__ in _LENIENT + assert _LENIENT[qualname___eq__] + assert _LENIENT[self.cls.__eq__] def test_cannot_compare_non_class(self): result = self.metadata.__eq__(None) - self.assertIs(NotImplemented, result) + assert result is NotImplemented def test_cannot_compare_different_class(self): other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) result = self.metadata.__eq__(other) - self.assertIs(NotImplemented, result) - - def test_lenient(self): - return_value = sentinel.return_value - with mock.patch("iris.common.metadata._LENIENT", return_value=True) as mlenient: - with mock.patch.object( - self.cls, "_compare_lenient", return_value=return_value - ) as mcompare: - result = self.metadata.__eq__(self.metadata) - - self.assertEqual(return_value, result) - self.assertEqual(1, mcompare.call_count) + assert result is NotImplemented + + def test_lenient(self, mocker): + return_value = mocker.sentinel.return_value + mlenient = mocker.patch("iris.common.metadata._LENIENT", return_value=True) + mcompare = mocker.patch.object( + self.cls, "_compare_lenient", return_value=return_value + ) + result = self.metadata.__eq__(self.metadata) + + assert result == return_value + 
assert mcompare.call_count == 1 (arg,), kwargs = mcompare.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert arg is self.metadata + assert kwargs == {} - self.assertEqual(1, mlenient.call_count) + assert mlenient.call_count == 1 (arg,), kwargs = mlenient.call_args - self.assertEqual(_qualname(self.cls.__eq__), _qualname(arg)) - self.assertEqual(dict(), kwargs) + assert _qualname(arg) == _qualname(self.cls.__eq__) + assert kwargs == {} def test_strict_same(self): - self.assertTrue(self.metadata.__eq__(self.metadata)) + assert self.metadata.__eq__(self.metadata) other = self.cls(**self.kwargs) - self.assertTrue(self.metadata.__eq__(other)) - self.assertTrue(other.__eq__(self.metadata)) + assert self.metadata.__eq__(other) + assert other.__eq__(self.metadata) def test_strict_different(self): self.kwargs["var_name"] = None other = self.cls(**self.kwargs) - self.assertFalse(self.metadata.__eq__(other)) - self.assertFalse(other.__eq__(self.metadata)) + assert not self.metadata.__eq__(other) + assert not other.__eq__(self.metadata) -class Test___lt__(tests.IrisTest): - def setUp(self): +class Test___lt__: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = BaseMetadata self.one = self.cls(1, 1, 1, 1, 1) self.two = self.cls(1, 1, 1, 2, 1) @@ -140,122 +139,125 @@ def setUp(self): def test__ascending_lt(self): result = self.one < self.two - self.assertTrue(result) + assert result def test__descending_lt(self): result = self.two < self.one - self.assertFalse(result) + assert not result def test__none_rhs_operand(self): result = self.one < self.none - self.assertFalse(result) + assert not result def test__none_lhs_operand(self): result = self.none < self.one - self.assertTrue(result) + assert result def test__ignore_attributes(self): result = self.one < self.attributes - self.assertFalse(result) + assert not result result = self.attributes < self.one - self.assertFalse(result) + assert not result -class Test___ne__(tests.IrisTest): - def setUp(self): +class Test___ne__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.other = sentinel.other + self.other = mocker.sentinel.other - def test_notimplemented(self): + def test_notimplemented(self, mocker): return_value = NotImplemented - with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: - result = self.metadata.__ne__(self.other) + patcher = mocker.patch.object(self.cls, "__eq__", return_value=return_value) + result = self.metadata.__ne__(self.other) - self.assertIs(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(self.other, arg) - self.assertEqual(dict(), kwargs) + assert result is return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == self.other + assert kwargs == {} - def test_negate_true(self): + def test_negate_true(self, mocker): return_value = True - with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: - result = self.metadata.__ne__(self.other) + patcher = mocker.patch.object(self.cls, "__eq__", return_value=return_value) + result = self.metadata.__ne__(self.other) - self.assertFalse(result) - (arg,), kwargs = mocker.call_args - self.assertEqual(self.other, arg) - self.assertEqual(dict(), kwargs) + assert not result + (arg,), kwargs = patcher.call_args + assert arg == self.other + assert kwargs == {} - def 
test_negate_false(self): + def test_negate_false(self, mocker): return_value = False - with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: - result = self.metadata.__ne__(self.other) + patcher = mocker.patch.object(self.cls, "__eq__", return_value=return_value) + result = self.metadata.__ne__(self.other) - self.assertTrue(result) - (arg,), kwargs = mocker.call_args - self.assertEqual(self.other, arg) - self.assertEqual(dict(), kwargs) + assert result + (arg,), kwargs = patcher.call_args + assert arg == self.other + assert kwargs == {} -class Test__combine(tests.IrisTest): - def setUp(self): +class Test__combine: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.kwargs = dict( standard_name="standard_name", long_name="long_name", var_name="var_name", units="units", - attributes=dict(one=sentinel.one, two=sentinel.two), + attributes=dict(one=mocker.sentinel.one, two=mocker.sentinel.two), ) self.cls = BaseMetadata self.metadata = self.cls(**self.kwargs) - def test_lenient(self): - return_value = sentinel._combine_lenient - other = sentinel.other - with mock.patch("iris.common.metadata._LENIENT", return_value=True) as mlenient: - with mock.patch.object( - self.cls, "_combine_lenient", return_value=return_value - ) as mcombine: - result = self.metadata._combine(other) - - self.assertEqual(1, mlenient.call_count) - (arg,), kwargs = mlenient.call_args - self.assertEqual(self.metadata.combine, arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(return_value, result) - self.assertEqual(1, mcombine.call_count) - (arg,), kwargs = mcombine.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_strict(self): - dummy = sentinel.dummy + def test_lenient(self, mocker): + return_value = mocker.sentinel._combine_lenient + other = mocker.sentinel.other + mlenient = mocker.patch("iris.common.metadata._LENIENT", return_value=True) + mcombine = mocker.patch.object( + self.cls, "_combine_lenient", return_value=return_value + ) + result = self.metadata._combine(other) + + assert mlenient.call_count == 1 + (arg,), kwargs = mlenient.call_args + assert arg == self.metadata.combine + assert kwargs == {} + + assert result == return_value + assert mcombine.call_count == 1 + (arg,), kwargs = mcombine.call_args + assert arg == other + assert kwargs == {} + + def test_strict(self, mocker): + dummy = mocker.sentinel.dummy values = self.kwargs.copy() values["standard_name"] = dummy values["var_name"] = dummy values["attributes"] = dummy other = self.cls(**values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - result = self.metadata._combine(other) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + result = self.metadata._combine(other) expected = [ None if values[field] == dummy else values[field] for field in self.cls._fields ] - self.assertEqual(expected, result) + assert result == expected -class Test__combine_lenient(tests.IrisTest): - def setUp(self): +class Test__combine_lenient: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.cls = BaseMetadata self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() self.names = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, ) def test_strict_units(self): @@ -266,8 +268,8 @@ def test_strict_units(self): rmetadata = self.cls(**right) expected = 
list(left.values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + assert lmetadata._combine_lenient(rmetadata) == expected + assert rmetadata._combine_lenient(lmetadata) == expected def test_strict_units_different(self): left = self.none.copy() @@ -279,9 +281,9 @@ def test_strict_units_different(self): result = lmetadata._combine_lenient(rmetadata) expected = list(self.none.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) + assert result == expected def test_strict_units_different_none(self): left = self.none.copy() @@ -292,57 +294,57 @@ def test_strict_units_different_none(self): result = lmetadata._combine_lenient(rmetadata) expected = list(self.none.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) + assert result == expected - def test_attributes(self): + def test_attributes(self, mocker): left = self.none.copy() right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = dict(item=sentinel.right) + ldict = dict(item=mocker.sentinel.left) + rdict = dict(item=mocker.sentinel.right) left["attributes"] = ldict right["attributes"] = rdict rmetadata = self.cls(**right) - return_value = sentinel.return_value - with mock.patch.object( + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( self.cls, "_combine_lenient_attributes", return_value=return_value, - ) as mocker: - lmetadata = self.cls(**left) - result = lmetadata._combine_lenient(rmetadata) + ) + lmetadata = self.cls(**left) + result = lmetadata._combine_lenient(rmetadata) expected = self.none.copy() expected["attributes"] = return_value expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected - self.assertEqual(1, mocker.call_count) - args, kwargs = mocker.call_args + assert patcher.call_count == 1 + args, kwargs = patcher.call_args expected = (ldict, rdict) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) + assert args == expected + assert kwargs == {} - def test_attributes_non_mapping_different(self): + def test_attributes_non_mapping_different(self, mocker): left = self.none.copy() right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = sentinel.right + ldict = dict(item=mocker.sentinel.left) + rdict = mocker.sentinel.right left["attributes"] = ldict right["attributes"] = rdict lmetadata = self.cls(**left) rmetadata = self.cls(**right) expected = list(self.none.copy().values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + assert lmetadata._combine_lenient(rmetadata) == expected + assert rmetadata._combine_lenient(lmetadata) == expected - def test_attributes_non_mapping_different_none(self): + def test_attributes_non_mapping_different_none(self, mocker): left = self.none.copy() right = self.none.copy() - ldict = dict(item=sentinel.left) + ldict = dict(item=mocker.sentinel.left) left["attributes"] = ldict lmetadata = self.cls(**left) rmetadata = self.cls(**right) @@ -351,10 +353,10 @@ def test_attributes_non_mapping_different_none(self): expected = self.none.copy() expected["attributes"] = ldict expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected result = 
rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) + assert result == expected def test_names(self): left = self.none.copy() @@ -364,11 +366,11 @@ def test_names(self): rmetadata = self.cls(**right) expected = list(left.values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + assert lmetadata._combine_lenient(rmetadata) == expected + assert rmetadata._combine_lenient(lmetadata) == expected - def test_names_different(self): - dummy = sentinel.dummy + def test_names_different(self, mocker): + dummy = mocker.sentinel.dummy left = self.none.copy() right = self.none.copy() left.update(self.names) @@ -379,8 +381,8 @@ def test_names_different(self): rmetadata = self.cls(**right) expected = list(self.none.copy().values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) + assert lmetadata._combine_lenient(rmetadata) == expected + assert rmetadata._combine_lenient(lmetadata) == expected def test_names_different_none(self): left = self.none.copy() @@ -391,14 +393,15 @@ def test_names_different_none(self): result = lmetadata._combine_lenient(rmetadata) expected = list(left.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) + assert result == expected -class Test__combine_lenient_attributes(tests.IrisTest): - def setUp(self): +class Test__combine_lenient_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = OrderedDict( one="one", two="two", @@ -408,7 +411,7 @@ def setUp(self): ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy def test_same(self): left = self.values.copy() @@ -416,10 +419,10 @@ def test_same(self): result = self.metadata._combine_lenient_attributes(left, right) expected = left - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) def test_different(self): left = self.values.copy() @@ -430,10 +433,10 @@ def test_different(self): expected = self.values.copy() for key in ["two", "four"]: del expected[key] - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) def test_different_none(self): left = self.values.copy() @@ -444,10 +447,10 @@ def test_different_none(self): expected = self.values.copy() for key in ["one", "three", "five"]: del expected[key] - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) def test_extra(self): left = self.values.copy() @@ -459,14 +462,15 @@ def test_extra(self): expected = self.values.copy() expected["extra_left"] = left["extra_left"] expected["extra_right"] = right["extra_right"] - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) -class 
Test__combine_strict_attributes(tests.IrisTest): - def setUp(self): +class Test__combine_strict_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = OrderedDict( one="one", two="two", @@ -476,7 +480,7 @@ def setUp(self): ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy def test_same(self): left = self.values.copy() @@ -484,10 +488,10 @@ def test_same(self): result = self.metadata._combine_strict_attributes(left, right) expected = left - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) def test_different(self): left = self.values.copy() @@ -498,10 +502,10 @@ def test_different(self): expected = self.values.copy() for key in ["one", "three"]: del expected[key] - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) def test_different_none(self): left = self.values.copy() @@ -512,10 +516,10 @@ def test_different_none(self): expected = self.values.copy() for key in ["one", "three", "five"]: del expected[key] - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) def test_extra(self): left = self.values.copy() @@ -525,76 +529,71 @@ def test_extra(self): result = self.metadata._combine_strict_attributes(left, right) expected = self.values.copy() - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) + assert_dict_equal(result, expected) -class Test__compare_lenient(tests.IrisTest): - def setUp(self): +class Test__compare_lenient: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.cls = BaseMetadata self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() self.names = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, ) - def test_name_same(self): + def test_name_same(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() lmetadata = self.cls(**left) rmetadata = self.cls(**right) - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) + patcher = mocker.patch.object(self.cls, "_is_attributes", return_value=False) + assert lmetadata._compare_lenient(rmetadata) + assert rmetadata._compare_lenient(lmetadata) # mocker not called for "units" nor "var_name" members. 
expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) + assert patcher.call_count == expected - def test_name_same_lenient_false__long_name_different(self): + def test_name_same_lenient_false__long_name_different(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() - right["long_name"] = sentinel.dummy + right["long_name"] = mocker.sentinel.dummy lmetadata = self.cls(**left) rmetadata = self.cls(**right) - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) + patcher = mocker.patch.object(self.cls, "_is_attributes", return_value=False) + assert not lmetadata._compare_lenient(rmetadata) + assert not rmetadata._compare_lenient(lmetadata) # mocker not called for "units" nor "var_name" members. expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) + assert patcher.call_count == expected - def test_name_same_lenient_true__var_name_different(self): + def test_name_same_lenient_true__var_name_different(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() - right["var_name"] = sentinel.dummy + right["var_name"] = mocker.sentinel.dummy lmetadata = self.cls(**left) rmetadata = self.cls(**right) - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) + patcher = mocker.patch.object(self.cls, "_is_attributes", return_value=False) + assert lmetadata._compare_lenient(rmetadata) + assert rmetadata._compare_lenient(lmetadata) # mocker not called for "units" nor "var_name" members. expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) + assert patcher.call_count == expected - def test_name_different(self): + def test_name_different(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() @@ -602,13 +601,13 @@ def test_name_different(self): lmetadata = self.cls(**left) rmetadata = self.cls(**right) - with mock.patch.object(self.cls, "_is_attributes") as mocker: - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) + patcher = mocker.patch.object(self.cls, "_is_attributes") + assert not lmetadata._compare_lenient(rmetadata) + assert not rmetadata._compare_lenient(lmetadata) - self.assertEqual(0, mocker.call_count) + assert patcher.call_count == 0 - def test_strict_units(self): + def test_strict_units(self, mocker): left = self.none.copy() left.update(self.names) left["units"] = "K" @@ -616,17 +615,15 @@ def test_strict_units(self): lmetadata = self.cls(**left) rmetadata = self.cls(**right) - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) + patcher = mocker.patch.object(self.cls, "_is_attributes", return_value=False) + assert lmetadata._compare_lenient(rmetadata) + assert rmetadata._compare_lenient(lmetadata) # mocker not called for "units" nor "var_name" members. 
expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) + assert patcher.call_count == expected - def test_strict_units_different(self): + def test_strict_units_different(self, mocker): left = self.none.copy() left.update(self.names) left["units"] = "K" @@ -635,63 +632,61 @@ def test_strict_units_different(self): lmetadata = self.cls(**left) rmetadata = self.cls(**right) - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) + patcher = mocker.patch.object(self.cls, "_is_attributes", return_value=False) + assert not lmetadata._compare_lenient(rmetadata) + assert not rmetadata._compare_lenient(lmetadata) # mocker not called for "units" nor "var_name" members. expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) + assert patcher.call_count == expected - def test_attributes(self): + def test_attributes(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() - ldict = dict(item=sentinel.left) - rdict = dict(item=sentinel.right) + ldict = dict(item=mocker.sentinel.left) + rdict = dict(item=mocker.sentinel.right) left["attributes"] = ldict right["attributes"] = rdict rmetadata = self.cls(**right) - with mock.patch.object( + patcher = mocker.patch.object( self.cls, "_compare_lenient_attributes", return_value=True, - ) as mocker: - lmetadata = self.cls(**left) - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) + ) + lmetadata = self.cls(**left) + assert lmetadata._compare_lenient(rmetadata) + assert rmetadata._compare_lenient(lmetadata) - self.assertEqual(2, mocker.call_count) + assert patcher.call_count == 2 expected = [((ldict, rdict),), ((rdict, ldict),)] - self.assertEqual(expected, mocker.call_args_list) + assert patcher.call_args_list == expected - def test_attributes_non_mapping_different(self): + def test_attributes_non_mapping_different(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() - ldict = dict(item=sentinel.left) - rdict = sentinel.right + ldict = dict(item=mocker.sentinel.left) + rdict = mocker.sentinel.right left["attributes"] = ldict right["attributes"] = rdict lmetadata = self.cls(**left) rmetadata = self.cls(**right) - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) + assert not lmetadata._compare_lenient(rmetadata) + assert not rmetadata._compare_lenient(lmetadata) - def test_attributes_non_mapping_different_none(self): + def test_attributes_non_mapping_different_none(self, mocker): left = self.none.copy() left.update(self.names) right = left.copy() - ldict = dict(item=sentinel.left) + ldict = dict(item=mocker.sentinel.left) left["attributes"] = ldict lmetadata = self.cls(**left) rmetadata = self.cls(**right) - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) + assert lmetadata._compare_lenient(rmetadata) + assert rmetadata._compare_lenient(lmetadata) def test_names(self): left = self.none.copy() @@ -702,179 +697,183 @@ def test_names(self): lmetadata = self.cls(**left) rmetadata = self.cls(**right) - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._combine_lenient(lmetadata)) + assert lmetadata._compare_lenient(rmetadata) + assert rmetadata._compare_lenient(lmetadata) 
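
Illustrative sketch (not part of the patch): the hunks above repeatedly swap a "with mock.patch.object(...)" context manager plus self.assert* calls for the pytest-mock "mocker" fixture plus plain assert statements. A minimal, self-contained example of the resulting style follows; the class and method names are placeholders, while BaseMetadata and the __ne__/__eq__ behaviour are taken from the tests in this file.

    import pytest

    from iris.common.metadata import BaseMetadata


    class TestNeSketch:
        @pytest.fixture(autouse=True)
        def _setup(self):
            self.metadata = BaseMetadata(*(None,) * len(BaseMetadata._fields))

        def test_ne_delegates_to_eq(self, mocker):
            # mocker.patch.object replaces the "with mock.patch.object(...)" block
            # and is undone automatically when the test finishes.
            patched = mocker.patch.object(BaseMetadata, "__eq__", return_value=True)
            result = self.metadata.__ne__(mocker.sentinel.other)
            # Plain asserts replace self.assertFalse / self.assertEqual.
            assert not result
            assert patched.call_count == 1
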
-class Test__compare_lenient_attributes(tests.IrisTest): - def setUp(self): +class Test__compare_lenient_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, + one=mocker.sentinel.one, + two=mocker.sentinel.two, three=np.int16(123), four=np.arange(10), five=ma.arange(5), ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy def test_same(self): left = self.values.copy() right = self.values.copy() - self.assertTrue(self.metadata._compare_lenient_attributes(left, right)) - self.assertTrue(self.metadata._compare_lenient_attributes(right, left)) + assert self.metadata._compare_lenient_attributes(left, right) + assert self.metadata._compare_lenient_attributes(right, left) def test_different(self): left = self.values.copy() right = self.values.copy() left["two"] = left["four"] = self.dummy - self.assertFalse(self.metadata._compare_lenient_attributes(left, right)) - self.assertFalse(self.metadata._compare_lenient_attributes(right, left)) + assert not self.metadata._compare_lenient_attributes(left, right) + assert not self.metadata._compare_lenient_attributes(right, left) def test_different_none(self): left = self.values.copy() right = self.values.copy() left["one"] = left["three"] = left["five"] = None - self.assertFalse(self.metadata._compare_lenient_attributes(left, right)) - self.assertFalse(self.metadata._compare_lenient_attributes(right, left)) + assert not self.metadata._compare_lenient_attributes(left, right) + assert not self.metadata._compare_lenient_attributes(right, left) - def test_extra(self): + def test_extra(self, mocker): left = self.values.copy() right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right + left["extra_left"] = mocker.sentinel.extra_left + right["extra_right"] = mocker.sentinel.extra_right - self.assertTrue(self.metadata._compare_lenient_attributes(left, right)) - self.assertTrue(self.metadata._compare_lenient_attributes(right, left)) + assert self.metadata._compare_lenient_attributes(left, right) + assert self.metadata._compare_lenient_attributes(right, left) -class Test__compare_strict_attributes(tests.IrisTest): - def setUp(self): +class Test__compare_strict_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, + one=mocker.sentinel.one, + two=mocker.sentinel.two, three=np.int16(123), four=np.arange(10), five=ma.arange(5), ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy def test_same(self): left = self.values.copy() right = self.values.copy() - self.assertTrue(self.metadata._compare_strict_attributes(left, right)) - self.assertTrue(self.metadata._compare_strict_attributes(right, left)) + assert self.metadata._compare_strict_attributes(left, right) + assert self.metadata._compare_strict_attributes(right, left) def test_different(self): left = self.values.copy() right = self.values.copy() left["two"] = left["four"] = self.dummy - self.assertFalse(self.metadata._compare_strict_attributes(left, right)) - self.assertFalse(self.metadata._compare_strict_attributes(right, left)) + assert not self.metadata._compare_strict_attributes(left, right) + assert not self.metadata._compare_strict_attributes(right, left) def test_different_none(self): left = 
self.values.copy() right = self.values.copy() left["one"] = left["three"] = left["five"] = None - self.assertFalse(self.metadata._compare_strict_attributes(left, right)) - self.assertFalse(self.metadata._compare_strict_attributes(right, left)) + assert not self.metadata._compare_strict_attributes(left, right) + assert not self.metadata._compare_strict_attributes(right, left) - def test_extra(self): + def test_extra(self, mocker): left = self.values.copy() right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right + left["extra_left"] = mocker.sentinel.extra_left + right["extra_right"] = mocker.sentinel.extra_right - self.assertFalse(self.metadata._compare_strict_attributes(left, right)) - self.assertFalse(self.metadata._compare_strict_attributes(right, left)) + assert not self.metadata._compare_strict_attributes(left, right) + assert not self.metadata._compare_strict_attributes(right, left) -class Test__difference(tests.IrisTest): - def setUp(self): +class Test__difference: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.kwargs = dict( standard_name="standard_name", long_name="long_name", var_name="var_name", units="units", - attributes=dict(one=sentinel.one, two=sentinel.two), + attributes=dict(one=mocker.sentinel.one, two=mocker.sentinel.two), ) self.cls = BaseMetadata self.metadata = self.cls(**self.kwargs) - def test_lenient(self): - return_value = sentinel._difference_lenient - other = sentinel.other - with mock.patch("iris.common.metadata._LENIENT", return_value=True) as mlenient: - with mock.patch.object( - self.cls, "_difference_lenient", return_value=return_value - ) as mdifference: - result = self.metadata._difference(other) - - self.assertEqual(1, mlenient.call_count) - (arg,), kwargs = mlenient.call_args - self.assertEqual(self.metadata.difference, arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(return_value, result) - self.assertEqual(1, mdifference.call_count) - (arg,), kwargs = mdifference.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_strict(self): - dummy = sentinel.dummy + def test_lenient(self, mocker): + return_value = mocker.sentinel._difference_lenient + other = mocker.sentinel.other + mlenient = mocker.patch("iris.common.metadata._LENIENT", return_value=True) + mdifference = mocker.patch.object( + self.cls, "_difference_lenient", return_value=return_value + ) + result = self.metadata._difference(other) + + assert mlenient.call_count == 1 + (arg,), kwargs = mlenient.call_args + assert arg == self.metadata.difference + assert kwargs == {} + + assert result == return_value + assert mdifference.call_count == 1 + (arg,), kwargs = mdifference.call_args + assert arg == other + assert kwargs == {} + + def test_strict(self, mocker): + dummy = mocker.sentinel.dummy values = self.kwargs.copy() values["long_name"] = dummy values["units"] = dummy other = self.cls(**values) method = "_difference_strict_attributes" - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - with mock.patch.object(self.cls, method, return_value=None) as mdifference: - result = self.metadata._difference(other) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + mdifference = mocker.patch.object(self.cls, method, return_value=None) + result = self.metadata._difference(other) expected = [ (self.kwargs[field], dummy) if values[field] == dummy else None for field in self.cls._fields ] - self.assertEqual(expected, result) - self.assertEqual(1, 
mdifference.call_count) + assert result == expected + assert mdifference.call_count == 1 args, kwargs = mdifference.call_args expected = (self.kwargs["attributes"], values["attributes"]) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) + assert args == expected + assert kwargs == {} - with mock.patch.object(self.cls, method, return_value=None) as mdifference: - result = other._difference(self.metadata) + mdifference = mocker.patch.object(self.cls, method, return_value=None) + result = other._difference(self.metadata) expected = [ (dummy, self.kwargs[field]) if values[field] == dummy else None for field in self.cls._fields ] - self.assertEqual(expected, result) - self.assertEqual(1, mdifference.call_count) + assert result == expected + assert mdifference.call_count == 1 args, kwargs = mdifference.call_args expected = (self.kwargs["attributes"], values["attributes"]) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) + assert args == expected + assert kwargs == {} -class Test__difference_lenient(tests.IrisTest): - def setUp(self): +class Test__difference_lenient: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.cls = BaseMetadata self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() self.names = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, ) def test_strict_units(self): @@ -884,8 +883,8 @@ def test_strict_units(self): lmetadata = self.cls(**left) rmetadata = self.cls(**right) expected = list(self.none.values()) - self.assertEqual(expected, lmetadata._difference_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._difference_lenient(lmetadata)) + assert lmetadata._difference_lenient(rmetadata) == expected + assert rmetadata._difference_lenient(lmetadata) == expected def test_strict_units_different(self): left = self.none.copy() @@ -900,13 +899,13 @@ def test_strict_units_different(self): expected = self.none.copy() expected["units"] = (lunits, runits) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._difference_lenient(lmetadata) expected = self.none.copy() expected["units"] = (runits, lunits) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected def test_strict_units_different_none(self): left = self.none.copy() @@ -921,46 +920,46 @@ def test_strict_units_different_none(self): expected["units"] = (lunits, runits) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._difference_lenient(lmetadata) expected = self.none.copy() expected["units"] = (runits, lunits) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected - def test_attributes(self): + def test_attributes(self, mocker): left = self.none.copy() right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = dict(item=sentinel.right) + ldict = dict(item=mocker.sentinel.left) + rdict = dict(item=mocker.sentinel.right) left["attributes"] = ldict right["attributes"] = rdict rmetadata = self.cls(**right) - return_value = sentinel.return_value - with mock.patch.object( + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( self.cls, "_difference_lenient_attributes", return_value=return_value, - ) as mocker: - lmetadata = 
self.cls(**left) - result = lmetadata._difference_lenient(rmetadata) + ) + lmetadata = self.cls(**left) + result = lmetadata._difference_lenient(rmetadata) expected = self.none.copy() expected["attributes"] = return_value expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected - self.assertEqual(1, mocker.call_count) - args, kwargs = mocker.call_args + assert patcher.call_count == 1 + args, kwargs = patcher.call_args expected = (ldict, rdict) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) + assert args == expected + assert kwargs == {} - def test_attributes_non_mapping_different(self): + def test_attributes_non_mapping_different(self, mocker): left = self.none.copy() right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = sentinel.right + ldict = dict(item=mocker.sentinel.left) + rdict = mocker.sentinel.right left["attributes"] = ldict right["attributes"] = rdict lmetadata = self.cls(**left) @@ -970,28 +969,28 @@ def test_attributes_non_mapping_different(self): expected = self.none.copy() expected["attributes"] = (ldict, rdict) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._difference_lenient(lmetadata) expected = self.none.copy() expected["attributes"] = (rdict, ldict) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected - def test_attributes_non_mapping_different_none(self): + def test_attributes_non_mapping_different_none(self, mocker): left = self.none.copy() right = self.none.copy() - ldict = dict(item=sentinel.left) + ldict = dict(item=mocker.sentinel.left) left["attributes"] = ldict lmetadata = self.cls(**left) rmetadata = self.cls(**right) result = lmetadata._difference_lenient(rmetadata) expected = list(self.none.copy().values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._difference_lenient(lmetadata) - self.assertEqual(expected, result) + assert result == expected def test_names(self): left = self.none.copy() @@ -1001,11 +1000,11 @@ def test_names(self): rmetadata = self.cls(**right) expected = list(self.none.values()) - self.assertEqual(expected, lmetadata._difference_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._difference_lenient(lmetadata)) + assert lmetadata._difference_lenient(rmetadata) == expected + assert rmetadata._difference_lenient(lmetadata) == expected - def test_names_different(self): - dummy = sentinel.dummy + def test_names_different(self, mocker): + dummy = mocker.sentinel.dummy left = self.none.copy() right = self.none.copy() left.update(self.names) @@ -1024,7 +1023,7 @@ def test_names_different(self): expected["long_name"] = (left["long_name"], right["long_name"]) expected["var_name"] = (left["var_name"], right["var_name"]) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected result = rmetadata._difference_lenient(lmetadata) expected = self.none.copy() @@ -1035,7 +1034,7 @@ def test_names_different(self): expected["long_name"] = (right["long_name"], left["long_name"]) expected["var_name"] = (right["var_name"], left["var_name"]) expected = list(expected.values()) - self.assertEqual(expected, result) + assert result == expected def test_names_different_none(self): left = self.none.copy() @@ -1046,34 +1045,35 @@ def test_names_different_none(self): result = lmetadata._difference_lenient(rmetadata) expected = list(self.none.values()) - self.assertEqual(expected, 
result) + assert result == expected result = rmetadata._difference_lenient(lmetadata) - self.assertEqual(expected, result) + assert result == expected -class Test__difference_lenient_attributes(tests.IrisTest): - def setUp(self): +class Test__difference_lenient_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, + one=mocker.sentinel.one, + two=mocker.sentinel.two, three=np.float64(3.14), four=np.arange(10, dtype=np.float64), five=ma.arange(10, dtype=np.int16), ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy def test_same(self): left = self.values.copy() right = self.values.copy() result = self.metadata._difference_lenient_attributes(left, right) - self.assertIsNone(result) + assert result is None result = self.metadata._difference_lenient_attributes(right, left) - self.assertIsNone(result) + assert result is None def test_different(self): left = self.values.copy() @@ -1086,13 +1086,13 @@ def test_different(self): del right[key] expected_left, expected_right = (left, right) result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) + assert_dict_equal(result_left, expected_left) + assert_dict_equal(result_right, expected_right) result = self.metadata._difference_lenient_attributes(right, left) result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) + assert_dict_equal(result_left, expected_right) + assert_dict_equal(result_right, expected_left) def test_different_none(self): left = self.values.copy() @@ -1105,47 +1105,48 @@ def test_different_none(self): del right[key] expected_left, expected_right = (left, right) result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) + assert_dict_equal(result_left, expected_left) + assert_dict_equal(result_right, expected_right) result = self.metadata._difference_lenient_attributes(right, left) result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) + assert_dict_equal(result_left, expected_right) + assert_dict_equal(result_right, expected_left) - def test_extra(self): + def test_extra(self, mocker): left = self.values.copy() right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right + left["extra_left"] = mocker.sentinel.extra_left + right["extra_right"] = mocker.sentinel.extra_right result = self.metadata._difference_lenient_attributes(left, right) - self.assertIsNone(result) + assert result is None result = self.metadata._difference_lenient_attributes(right, left) - self.assertIsNone(result) + assert result is None -class Test__difference_strict_attributes(tests.IrisTest): - def setUp(self): +class Test__difference_strict_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, + one=mocker.sentinel.one, + two=mocker.sentinel.two, three=np.int32(123), four=np.arange(10), five=ma.arange(10), ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy def test_same(self): left = self.values.copy() right = 
self.values.copy() result = self.metadata._difference_strict_attributes(left, right) - self.assertIsNone(result) + assert result is None result = self.metadata._difference_strict_attributes(right, left) - self.assertIsNone(result) + assert result is None def test_different(self): left = self.values.copy() @@ -1159,13 +1160,13 @@ def test_different(self): del expected_left[key] del expected_right[key] result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) + assert_dict_equal(result_left, expected_left) + assert_dict_equal(result_right, expected_right) result = self.metadata._difference_strict_attributes(right, left) result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) + assert_dict_equal(result_left, expected_right) + assert_dict_equal(result_right, expected_left) def test_different_none(self): left = self.values.copy() @@ -1179,54 +1180,56 @@ def test_different_none(self): del expected_left[key] del expected_right[key] result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) + assert_dict_equal(result_left, expected_left) + assert_dict_equal(result_right, expected_right) result = self.metadata._difference_strict_attributes(right, left) result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) + assert_dict_equal(result_left, expected_right) + assert_dict_equal(result_right, expected_left) - def test_extra(self): + def test_extra(self, mocker): left = self.values.copy() right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right + left["extra_left"] = mocker.sentinel.extra_left + right["extra_right"] = mocker.sentinel.extra_right result = self.metadata._difference_strict_attributes(left, right) expected_left = dict(extra_left=left["extra_left"]) expected_right = dict(extra_right=right["extra_right"]) result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) + assert_dict_equal(result_left, expected_left) + assert_dict_equal(result_right, expected_right) result = self.metadata._difference_strict_attributes(right, left) result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) + assert_dict_equal(result_left, expected_right) + assert_dict_equal(result_right, expected_left) -class Test__is_attributes(tests.IrisTest): - def setUp(self): +class Test__is_attributes: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) self.field = "attributes" def test_field(self): - self.assertTrue(self.metadata._is_attributes(self.field, {}, {})) + assert self.metadata._is_attributes(self.field, {}, {}) def test_field_not_attributes(self): - self.assertFalse(self.metadata._is_attributes(None, {}, {})) + assert not self.metadata._is_attributes(None, {}, {}) def test_left_not_mapping(self): - self.assertFalse(self.metadata._is_attributes(self.field, None, {})) + assert not self.metadata._is_attributes(self.field, None, {}) def test_right_not_mapping(self): - self.assertFalse(self.metadata._is_attributes(self.field, {}, None)) + assert not self.metadata._is_attributes(self.field, {}, None) 
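
Illustrative sketch (not part of the patch): the Test_combine and Test_difference hunks that follow also turn setUp methods into autouse pytest fixtures and replace assertRaisesRegex with pytest.raises(..., match=...). A minimal example of that shape, with a placeholder class name; the "Cannot combine" message and the metadata classes come from the tests themselves.

    import pytest

    from iris.common.metadata import BaseMetadata, CubeMetadata


    class TestCombineSketch:
        @pytest.fixture(autouse=True)
        def _setup(self):
            # An autouse fixture runs before every test, replacing unittest's setUp.
            self.metadata = BaseMetadata(*(None,) * len(BaseMetadata._fields))

        def test_cannot_combine_different_class(self):
            other = CubeMetadata(*(None,) * len(CubeMetadata._fields))
            # pytest.raises(match=...) replaces self.assertRaisesRegex.
            with pytest.raises(TypeError, match="Cannot combine"):
                _ = self.metadata.combine(other)
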
-class Test_combine(tests.IrisTest): - def setUp(self): +class Test_combine: + @pytest.fixture(autouse=True) + def _setup(self, mocker): kwargs = dict( standard_name="standard_name", long_name="long_name", @@ -1237,84 +1240,79 @@ def setUp(self): self.cls = BaseMetadata self.metadata = self.cls(**kwargs) self.mock_kwargs = OrderedDict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) + assert qualname_combine in _LENIENT + assert _LENIENT[qualname_combine] + assert _LENIENT[self.cls.combine] def test_cannot_combine_non_class(self): emsg = "Cannot combine" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.combine(None) + with pytest.raises(TypeError, match=emsg): + _ = self.metadata.combine(None) def test_cannot_combine_different_class(self): other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) emsg = "Cannot combine" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.combine(other) + with pytest.raises(TypeError, match=emsg): + _ = self.metadata.combine(other) - def test_lenient_default(self): + def test_lenient_default(self, mocker): return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_combine", return_value=return_value - ) as mocker: - result = self.metadata.combine(self.metadata) - - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_true(self): + patcher = mocker.patch.object(self.cls, "_combine", return_value=return_value) + result = self.metadata.combine(self.metadata) + + assert result._asdict() == self.mock_kwargs + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg is self.metadata + assert kwargs == {} + + def test_lenient_true(self, mocker): return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_combine", return_value=return_value - ) as mcombine: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.combine(self.metadata, lenient=True) + mcombine = mocker.patch.object(self.cls, "_combine", return_value=return_value) + mcontext = mocker.patch.object(_LENIENT, "context") + result = self.metadata.combine(self.metadata, lenient=True) - self.assertEqual(1, mcontext.call_count) + assert mcontext.call_count == 1 (arg,), kwargs = mcontext.call_args - self.assertEqual(_qualname(self.cls.combine), arg) - self.assertEqual(dict(), kwargs) + assert arg == _qualname(self.cls.combine) + assert kwargs == {} - self.assertEqual(result._asdict(), self.mock_kwargs) - self.assertEqual(1, mcombine.call_count) + assert result._asdict() == self.mock_kwargs + assert mcombine.call_count == 1 (arg,), kwargs = mcombine.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert arg is self.metadata + assert kwargs == {} - def test_lenient_false(self): + def test_lenient_false(self, mocker): return_value = 
self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_combine", return_value=return_value - ) as mcombine: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.combine(self.metadata, lenient=False) + mcombine = mocker.patch.object(self.cls, "_combine", return_value=return_value) + mcontext = mocker.patch.object(_LENIENT, "context") + result = self.metadata.combine(self.metadata, lenient=False) - self.assertEqual(1, mcontext.call_count) + assert mcontext.call_count == 1 args, kwargs = mcontext.call_args - self.assertEqual((), args) - self.assertEqual({_qualname(self.cls.combine): False}, kwargs) + assert args == () + assert kwargs == {_qualname(self.cls.combine): False} - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mcombine.call_count) + assert result._asdict() == self.mock_kwargs + assert mcombine.call_count == 1 (arg,), kwargs = mcombine.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert arg is self.metadata + assert kwargs == {} -class Test_difference(tests.IrisTest): - def setUp(self): +class Test_difference: + @pytest.fixture(autouse=True) + def _setup(self, mocker): kwargs = dict( standard_name="standard_name", long_name="long_name", @@ -1325,163 +1323,161 @@ def setUp(self): self.cls = BaseMetadata self.metadata = self.cls(**kwargs) self.mock_kwargs = OrderedDict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) + assert qualname_difference in _LENIENT + assert _LENIENT[qualname_difference] + assert _LENIENT[self.cls.difference] def test_cannot_differ_non_class(self): emsg = "Cannot differ" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.difference(None) + with pytest.raises(TypeError, match=emsg): + _ = self.metadata.difference(None) def test_cannot_differ_different_class(self): other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) emsg = "Cannot differ" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.difference(other) + with pytest.raises(TypeError, match=emsg): + _ = self.metadata.difference(other) - def test_lenient_default(self): + def test_lenient_default(self, mocker): return_value = self.mock_kwargs.values() - with mock.patch.object( + patcher = mocker.patch.object( self.cls, "_difference", return_value=return_value - ) as mocker: - result = self.metadata.difference(self.metadata) + ) + result = self.metadata.difference(self.metadata) - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert result._asdict() == self.mock_kwargs + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg is self.metadata + assert kwargs == {} - def test_lenient_true(self): + def test_lenient_true(self, mocker): return_value = self.mock_kwargs.values() - with mock.patch.object( + mdifference = mocker.patch.object( self.cls, 
"_difference", return_value=return_value - ) as mdifference: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.difference(self.metadata, lenient=True) + ) + mcontext = mocker.patch.object(_LENIENT, "context") + result = self.metadata.difference(self.metadata, lenient=True) - self.assertEqual(1, mcontext.call_count) + assert mcontext.call_count == 1 (arg,), kwargs = mcontext.call_args - self.assertEqual(_qualname(self.cls.difference), arg) - self.assertEqual(dict(), kwargs) + assert arg == _qualname(self.cls.difference) + assert kwargs == {} - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mdifference.call_count) + assert result._asdict() == self.mock_kwargs + assert mdifference.call_count == 1 (arg,), kwargs = mdifference.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert arg is self.metadata + assert kwargs == {} - def test_lenient_false(self): + def test_lenient_false(self, mocker): return_value = self.mock_kwargs.values() - with mock.patch.object( + mdifference = mocker.patch.object( self.cls, "_difference", return_value=return_value - ) as mdifference: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.difference(self.metadata, lenient=False) + ) + mcontext = mocker.patch.object(_LENIENT, "context") + result = self.metadata.difference(self.metadata, lenient=False) - self.assertEqual(mcontext.call_count, 1) + assert mcontext.call_count == 1 args, kwargs = mcontext.call_args - self.assertEqual((), args) - self.assertEqual({_qualname(self.cls.difference): False}, kwargs) + assert args == () + assert kwargs == {_qualname(self.cls.difference): False} - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mdifference.call_count) + assert result._asdict() == self.mock_kwargs + assert mdifference.call_count == 1 (arg,), kwargs = mdifference.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert arg is self.metadata + assert kwargs == {} -class Test_equal(tests.IrisTest): - def setUp(self): +class Test_equal: + @pytest.fixture(autouse=True) + def _setup(self, mocker): kwargs = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, ) self.cls = BaseMetadata self.metadata = self.cls(**kwargs) def test_lenient_service(self): qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue((_LENIENT[self.cls.equal])) + assert qualname_equal in _LENIENT + assert _LENIENT[qualname_equal] + assert _LENIENT[self.cls.equal] def test_cannot_compare_non_class(self): emsg = "Cannot compare" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.equal(None) + with pytest.raises(TypeError, match=emsg): + _ = self.metadata.equal(None) def test_cannot_compare_different_class(self): other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) emsg = "Cannot compare" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.equal(other) - - def test_lenient_default(self): - return_value = sentinel.return_value - with mock.patch.object(self.cls, "__eq__", return_value=return_value) as mocker: - result = 
self.metadata.equal(self.metadata) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_true(self): - return_value = sentinel.return_value - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as m__eq__: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.equal(self.metadata, lenient=True) - - self.assertEqual(return_value, result) - self.assertEqual(1, mcontext.call_count) + with pytest.raises(TypeError, match=emsg): + _ = self.metadata.equal(other) + + def test_lenient_default(self, mocker): + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(self.cls, "__eq__", return_value=return_value) + result = self.metadata.equal(self.metadata) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg is self.metadata + assert kwargs == {} + + def test_lenient_true(self, mocker): + return_value = mocker.sentinel.return_value + m__eq__ = mocker.patch.object(self.cls, "__eq__", return_value=return_value) + mcontext = mocker.patch.object(_LENIENT, "context") + result = self.metadata.equal(self.metadata, lenient=True) + + assert result == return_value + assert mcontext.call_count == 1 (arg,), kwargs = mcontext.call_args - self.assertEqual(_qualname(self.cls.equal), arg) - self.assertEqual(dict(), kwargs) + assert arg == _qualname(self.cls.equal) + assert kwargs == {} - self.assertEqual(1, m__eq__.call_count) + assert m__eq__.call_count == 1 (arg,), kwargs = m__eq__.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_false(self): - return_value = sentinel.return_value - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as m__eq__: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.equal(self.metadata, lenient=False) - - self.assertEqual(1, mcontext.call_count) + assert arg is self.metadata + assert kwargs == {} + + def test_lenient_false(self, mocker): + return_value = mocker.sentinel.return_value + m__eq__ = mocker.patch.object(self.cls, "__eq__", return_value=return_value) + mcontext = mocker.patch.object(_LENIENT, "context") + result = self.metadata.equal(self.metadata, lenient=False) + + assert mcontext.call_count == 1 args, kwargs = mcontext.call_args - self.assertEqual((), args) - self.assertEqual({_qualname(self.cls.equal): False}, kwargs) + assert args == () + assert kwargs == {_qualname(self.cls.equal): False} - self.assertEqual(return_value, result) - self.assertEqual(1, m__eq__.call_count) + assert result == return_value + assert m__eq__.call_count == 1 (arg,), kwargs = m__eq__.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) + assert arg is self.metadata + assert kwargs == {} -class Test_name(tests.IrisTest): - def setUp(self): +class Test_name: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = BaseMetadata self.default = self.cls.DEFAULT_NAME @@ -1500,122 +1496,119 @@ def test_standard_name(self): metadata = self._make(standard_name=token) result = metadata.name() - self.assertEqual(token, result) + assert result == token result = metadata.name(token=True) - self.assertEqual(token, result) + assert result == token def test_standard_name__invalid_token(self): token = "nope nope" metadata = 
self._make(standard_name=token) result = metadata.name() - self.assertEqual(token, result) + assert result == token result = metadata.name(token=True) - self.assertEqual(self.default, result) + assert result == self.default def test_long_name(self): token = "long_name" metadata = self._make(long_name=token) result = metadata.name() - self.assertEqual(token, result) + assert result == token result = metadata.name(token=True) - self.assertEqual(token, result) + assert result == token def test_long_name__invalid_token(self): token = "nope nope" metadata = self._make(long_name=token) result = metadata.name() - self.assertEqual(token, result) + assert result == token result = metadata.name(token=True) - self.assertEqual(self.default, result) + assert result == self.default def test_var_name(self): token = "var_name" metadata = self._make(var_name=token) result = metadata.name() - self.assertEqual(token, result) + assert result == token result = metadata.name(token=True) - self.assertEqual(token, result) + assert result == token def test_var_name__invalid_token(self): token = "nope nope" metadata = self._make(var_name=token) result = metadata.name() - self.assertEqual(token, result) + assert result == token result = metadata.name(token=True) - self.assertEqual(self.default, result) + assert result == self.default def test_default(self): metadata = self._make() result = metadata.name() - self.assertEqual(self.default, result) + assert result == self.default result = metadata.name(token=True) - self.assertEqual(self.default, result) + assert result == self.default def test_default__invalid_token(self): token = "nope nope" metadata = self._make() result = metadata.name(default=token) - self.assertEqual(token, result) + assert result == token emsg = "Cannot retrieve a valid name token" - with self.assertRaisesRegex(ValueError, emsg): - metadata.name(default=token, token=True) + with pytest.raises(ValueError, match=emsg): + _ = metadata.name(default=token, token=True) -class Test_token(tests.IrisTest): - def setUp(self): +class Test_token: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = BaseMetadata - def test_passthru_None(self): + def test_passthru_none(self): result = self.cls.token(None) - self.assertIsNone(result) + assert result is None def test_fail_leading_underscore(self): result = self.cls.token("_nope") - self.assertIsNone(result) + assert result is None def test_fail_leading_dot(self): result = self.cls.token(".nope") - self.assertIsNone(result) + assert result is None def test_fail_leading_plus(self): result = self.cls.token("+nope") - self.assertIsNone(result) + assert result is None def test_fail_leading_at(self): result = self.cls.token("@nope") - self.assertIsNone(result) + assert result is None def test_fail_space(self): result = self.cls.token("nope nope") - self.assertIsNone(result) + assert result is None def test_fail_colon(self): result = self.cls.token("nope:") - self.assertIsNone(result) + assert result is None def test_pass_simple(self): token = "simple" result = self.cls.token(token) - self.assertEqual(token, result) + assert result == token def test_pass_leading_digit(self): token = "123simple" result = self.cls.token(token) - self.assertEqual(token, result) + assert result == token def test_pass_mixture(self): token = "S.imple@one+two_3" result = self.cls.token(token) - self.assertEqual(token, result) - - -if __name__ == "__main__": - tests.main() + assert result == token diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py 
b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py index 3618d2ace5..9cabd21d01 100644 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py @@ -4,26 +4,23 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel + +import pytest from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import BaseMetadata, CellMeasureMetadata -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.measure = mock.sentinel.measure +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes + self.measure = mocker.sentinel.measure self.cls = CellMeasureMetadata def test_repr(self): @@ -47,7 +44,7 @@ def test_repr(self): self.attributes, self.measure, ) - self.assertEqual(expected, repr(metadata)) + assert repr(metadata) == expected def test__fields(self): expected = ( @@ -58,148 +55,148 @@ def test__fields(self): "attributes", "measure", ) - self.assertEqual(self.cls._fields, expected) + assert self.cls._fields == expected def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) + assert issubclass(self.cls, BaseMetadata) -class Test___eq__(tests.IrisTest): - def setUp(self): +class Test___eq__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - measure=sentinel.measure, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + measure=mocker.sentinel.measure, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CellMeasureMetadata def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + assert self.cls.__eq__.__doc__ == BaseMetadata.__eq__.__doc__ def test_lenient_service(self): qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) + assert qualname___eq__ in _LENIENT + assert _LENIENT[qualname___eq__] + assert _LENIENT[self.cls.__eq__] - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value + def test_call(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, 
mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): + patcher = mocker.patch.object(BaseMetadata, "__eq__", return_value=return_value) + result = metadata.__eq__(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == {} + + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_lenient_same_measure_none(self): + def test_op_lenient_same_measure_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["units"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_lenient_different_measure(self): + def test_op_lenient_different_measure(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() 
right["long_name"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_strict_different_measure(self): + def test_op_strict_different_measure(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - def test_op_strict_different_measure_none(self): + def test_op_strict_different_measure_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) -class Test___lt__(tests.IrisTest): - def setUp(self): +class Test___lt__: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = CellMeasureMetadata self.one = self.cls(1, 1, 1, 1, 1, 1) self.two = self.cls(1, 1, 1, 2, 1, 1) @@ -208,111 +205,112 @@ def setUp(self): def test__ascending_lt(self): result = self.one < self.two - self.assertTrue(result) + assert result def test__descending_lt(self): result = self.two < self.one - self.assertFalse(result) + assert not result def test__none_rhs_operand(self): result = self.one < self.none - self.assertFalse(result) + assert not result def test__none_lhs_operand(self): result = self.none < self.one - self.assertTrue(result) + assert result def test__ignore_attributes(self): result = self.one < self.attributes - self.assertFalse(result) + assert not result result = self.attributes < self.one - self.assertFalse(result) + assert not result -class Test_combine(tests.IrisTest): - def setUp(self): +class Test_combine: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - measure=sentinel.measure, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + measure=mocker.sentinel.measure, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = 
CellMeasureMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) + assert self.cls.combine.__doc__ == BaseMetadata.combine.__doc__ def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + assert qualname_combine in _LENIENT + assert _LENIENT[qualname_combine] + assert _LENIENT[self.cls.combine] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + ) + result = self.none.combine(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) + ) + result = self.none.combine(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) - def test_op_lenient_same(self): + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) expected = self.values - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) expected = self.values - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_lenient_same_measure_none(self): + def test_op_lenient_same_measure_none(self, mocker): lmetadata = 
self.cls(**self.values) right = self.values.copy() right["measure"] = None rmetadata = self.cls(**right) expected = right.copy() - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["units"] = self.dummy @@ -320,11 +318,11 @@ def test_op_lenient_different(self): expected = self.values.copy() expected["units"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_lenient_different_measure(self): + def test_op_lenient_different_measure(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = self.dummy @@ -332,20 +330,20 @@ def test_op_lenient_different_measure(self): expected = self.values.copy() expected["measure"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) expected = self.values.copy() - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = self.dummy @@ -353,11 +351,11 @@ def test_op_strict_different(self): expected = self.values.copy() expected["long_name"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different_measure(self): + def test_op_strict_different_measure(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = self.dummy @@ -365,11 +363,11 @@ def test_op_strict_different_measure(self): expected = self.values.copy() expected["measure"] = None - with 
mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = None @@ -377,11 +375,11 @@ def test_op_strict_different_none(self): expected = self.values.copy() expected["long_name"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different_measure_none(self): + def test_op_strict_different_measure_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = None @@ -389,96 +387,97 @@ def test_op_strict_different_measure_none(self): expected = self.values.copy() expected["measure"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected -class Test_difference(tests.IrisTest): - def setUp(self): +class Test_difference: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - measure=sentinel.measure, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + measure=mocker.sentinel.measure, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CellMeasureMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) + assert self.cls.difference.__doc__ == BaseMetadata.difference.__doc__ def test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + assert qualname_difference in _LENIENT + assert _LENIENT[qualname_difference] + assert _LENIENT[self.cls.difference] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, 
mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + ) + result = self.none.difference(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) + ) + result = self.none.difference(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) - def test_op_lenient_same(self): + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_lenient_same_measure_none(self): + def test_op_lenient_same_measure_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["measure"] = None rmetadata = self.cls(**right) lexpected = deepcopy(self.none)._asdict() - lexpected["measure"] = (sentinel.measure, None) + lexpected["measure"] = (mocker.sentinel.measure, None) rexpected = deepcopy(self.none)._asdict() - rexpected["measure"] = (None, sentinel.measure) + rexpected["measure"] = (None, mocker.sentinel.measure) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -489,11 +488,11 @@ def test_op_lenient_different(self): rexpected = deepcopy(self.none)._asdict() rexpected["units"] = lexpected["units"][::-1] - with mock.patch("iris.common.metadata._LENIENT", 
return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_lenient_different_measure(self): + def test_op_lenient_different_measure(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -504,19 +503,19 @@ def test_op_lenient_different_measure(self): rexpected = deepcopy(self.none)._asdict() rexpected["measure"] = lexpected["measure"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -527,11 +526,11 @@ def test_op_strict_different(self): rexpected = deepcopy(self.none)._asdict() rexpected["long_name"] = lexpected["long_name"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_strict_different_measure(self): + def test_op_strict_different_measure(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -542,11 +541,11 @@ def test_op_strict_different_measure(self): rexpected = deepcopy(self.none)._asdict() rexpected["measure"] = lexpected["measure"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -557,11 +556,11 @@ def test_op_strict_different_none(self): rexpected = deepcopy(self.none)._asdict() rexpected["long_name"] = lexpected["long_name"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, 
lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_strict_different_measure_none(self): + def test_op_strict_different_measure_none(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -572,54 +571,47 @@ def test_op_strict_different_measure_none(self): rexpected = deepcopy(self.none)._asdict() rexpected["measure"] = lexpected["measure"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected -class Test_equal(tests.IrisTest): - def setUp(self): +class Test_equal: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = CellMeasureMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + assert self.cls.equal.__doc__ == BaseMetadata.equal.__doc__ def test_lenient_service(self): qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() + assert qualname_equal in _LENIENT + assert _LENIENT[qualname_equal] + assert _LENIENT[self.cls.equal] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other, lenient=lenient) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) 
diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py index 010838b7fc..bb6b2115d2 100644 --- a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py @@ -4,27 +4,24 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.metadata.CoordMetadata`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel + +import pytest from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import BaseMetadata, CoordMetadata -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.coord_system = mock.sentinel.coord_system - self.climatological = mock.sentinel.climatological +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes + self.coord_system = mocker.sentinel.coord_system + self.climatological = mocker.sentinel.climatological self.cls = CoordMetadata def test_repr(self): @@ -51,7 +48,7 @@ def test_repr(self): self.coord_system, self.climatological, ) - self.assertEqual(expected, repr(metadata)) + assert repr(metadata) == expected def test__fields(self): expected = ( @@ -63,153 +60,153 @@ def test__fields(self): "coord_system", "climatological", ) - self.assertEqual(self.cls._fields, expected) + assert self.cls._fields == expected def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) + assert issubclass(self.cls, BaseMetadata) -class Test___eq__(tests.IrisTest): - def setUp(self): +class Test___eq__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - coord_system=sentinel.coord_system, - climatological=sentinel.climatological, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + coord_system=mocker.sentinel.coord_system, + climatological=mocker.sentinel.climatological, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CoordMetadata def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + assert self.cls.__eq__.__doc__ == BaseMetadata.__eq__.__doc__ def test_lenient_service(self): qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) + assert qualname___eq__ in _LENIENT + assert _LENIENT[qualname___eq__] + assert _LENIENT[self.cls.__eq__] - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value + def test_call(self, mocker): + other = mocker.sentinel.other + return_value = 
mocker.sentinel.return_value metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): + patcher = mocker.patch.object(BaseMetadata, "__eq__", return_value=return_value) + result = metadata.__eq__(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == {} + + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) + + def test_op_lenient_same_members_none(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=True) - def test_op_lenient_same_members_none(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None rmetadata = self.cls(**right) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["units"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) + + def test_op_lenient_different_members(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=True) - def test_op_lenient_different_members(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = self.dummy rmetadata = self.cls(**right) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - 
self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = self.dummy rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) + + def test_op_strict_different_members(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=False) - def test_op_strict_different_members(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = self.dummy rmetadata = self.cls(**right) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) + + def test_op_strict_different_members_none(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=False) - def test_op_strict_different_members_none(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) -class Test___lt__(tests.IrisTest): - def setUp(self): +class Test___lt__: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = CoordMetadata self.one = self.cls(1, 1, 1, 1, 1, 1, 1) self.two = self.cls(1, 1, 1, 2, 1, 1, 1) @@ -218,113 +215,114 @@ def setUp(self): def test__ascending_lt(self): result = self.one < self.two - self.assertTrue(result) + assert result def test__descending_lt(self): result = self.two < self.one - self.assertFalse(result) + assert not result def test__none_rhs_operand(self): result = self.one < self.none - self.assertFalse(result) + assert not result def test__none_lhs_operand(self): result = self.none < self.one - self.assertTrue(result) + assert result def test__ignore_attributes_coord_system(self): result = self.one < self.attributes_cs - self.assertFalse(result) + assert not result result = self.attributes_cs < self.one - self.assertFalse(result) + assert not result -class Test_combine(tests.IrisTest): - def setUp(self): +class Test_combine: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - 
standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - coord_system=sentinel.coord_system, - climatological=sentinel.climatological, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + coord_system=mocker.sentinel.coord_system, + climatological=mocker.sentinel.climatological, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CoordMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.combine.__doc__, self.cls.combine.__doc__) + assert self.cls.combine.__doc__ == BaseMetadata.combine.__doc__ def test_lenient_service(self): qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + assert qualname_combine in _LENIENT + assert _LENIENT[qualname_combine] + assert _LENIENT[self.cls.combine] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + ) + result = self.none.combine(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) + ) + result = self.none.combine(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) - def test_op_lenient_same(self): + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) expected = self.values - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() 
right["var_name"] = None rmetadata = self.cls(**right) expected = self.values - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_lenient_same_members_none(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=True) - def test_op_lenient_same_members_none(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() right[member] = None rmetadata = self.cls(**right) expected = right.copy() + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertTrue(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["units"] = self.dummy @@ -332,11 +330,13 @@ def test_op_lenient_different(self): expected = self.values.copy() expected["units"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_lenient_different_members(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=True) - def test_op_lenient_different_members(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() @@ -344,21 +344,19 @@ def test_op_lenient_different_members(self): rmetadata = self.cls(**right) expected = self.values.copy() expected[member] = None + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) expected = self.values.copy() - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = self.dummy @@ -366,11 +364,13 @@ def test_op_strict_different(self): expected = self.values.copy() expected["long_name"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, 
lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_strict_different_members(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=False) - def test_op_strict_different_members(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() @@ -378,12 +378,10 @@ def test_op_strict_different_members(self): rmetadata = self.cls(**right) expected = self.values.copy() expected[member] = None + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["long_name"] = None @@ -391,11 +389,13 @@ def test_op_strict_different_none(self): expected = self.values.copy() expected["long_name"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected + + def test_op_strict_different_members_none(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=False) - def test_op_strict_different_members_none(self): for member in self.cls._members: lmetadata = self.cls(**self.values) right = self.values.copy() @@ -403,84 +403,85 @@ def test_op_strict_different_members_none(self): rmetadata = self.cls(**right) expected = self.values.copy() expected[member] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected -class Test_difference(tests.IrisTest): - def setUp(self): +class Test_difference: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - coord_system=sentinel.coord_system, - climatological=sentinel.climatological, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + coord_system=mocker.sentinel.coord_system, + climatological=mocker.sentinel.climatological, ) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CoordMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.difference.__doc__, self.cls.difference.__doc__) + assert self.cls.difference.__doc__ == BaseMetadata.difference.__doc__ def 
test_lenient_service(self): qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + assert qualname_difference in _LENIENT + assert _LENIENT[qualname_difference] + assert _LENIENT[self.cls.difference] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + ) + result = self.none.difference(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) + ) + result = self.none.difference(other, lenient=lenient) - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) - def test_op_lenient_same(self): + def test_op_lenient_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_lenient_same_none(self): + def test_op_lenient_same_none(self, mocker): lmetadata = self.cls(**self.values) right = self.values.copy() right["var_name"] = None rmetadata = self.cls(**right) - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None + + def test_op_lenient_same_members_none(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=True) - def test_op_lenient_same_members_none(self): for member in self.cls._members: lmetadata = self.cls(**self.values) member_value = getattr(lmetadata, member) @@ -491,12 +492,10 @@ def test_op_lenient_same_members_none(self): lexpected[member] = (member_value, None) rexpected = deepcopy(self.none)._asdict() rexpected[member] = (None, member_value) + assert 
lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) - - def test_op_lenient_different(self): + def test_op_lenient_different(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -507,11 +506,13 @@ def test_op_lenient_different(self): rexpected = deepcopy(self.none)._asdict() rexpected["units"] = lexpected["units"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=True) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected + + def test_op_lenient_different_members(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=True) - def test_op_lenient_different_members(self): for member in self.cls._members: left = self.values.copy() lmetadata = self.cls(**left) @@ -522,20 +523,18 @@ def test_op_lenient_different_members(self): lexpected[member] = (left[member], right[member]) rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) - - def test_op_strict_same(self): + def test_op_strict_same(self, mocker): lmetadata = self.cls(**self.values) rmetadata = self.cls(**self.values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_strict_different(self): + def test_op_strict_different(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -546,11 +545,13 @@ def test_op_strict_different(self): rexpected = deepcopy(self.none)._asdict() rexpected["long_name"] = lexpected["long_name"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected + + def test_op_strict_different_members(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=False) - def test_op_strict_different_members(self): for member in self.cls._members: left = self.values.copy() lmetadata = self.cls(**left) @@ -561,12 +562,10 @@ def test_op_strict_different_members(self): lexpected[member] = (left[member], right[member]) rexpected = deepcopy(self.none)._asdict() rexpected[member] = 
lexpected[member][::-1] + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) - - def test_op_strict_different_none(self): + def test_op_strict_different_none(self, mocker): left = self.values.copy() lmetadata = self.cls(**left) right = self.values.copy() @@ -577,11 +576,13 @@ def test_op_strict_different_none(self): rexpected = deepcopy(self.none)._asdict() rexpected["long_name"] = lexpected["long_name"][::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + mocker.patch("iris.common.metadata._LENIENT", return_value=False) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected + + def test_op_strict_different_members_none(self, mocker): + mocker.patch("iris.common.metadata._LENIENT", return_value=False) - def test_op_strict_different_members_none(self): for member in self.cls._members: left = self.values.copy() lmetadata = self.cls(**left) @@ -592,55 +593,46 @@ def test_op_strict_different_members_none(self): lexpected[member] = (left[member], right[member]) rexpected = deepcopy(self.none)._asdict() rexpected[member] = lexpected[member][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(lexpected, lmetadata.difference(rmetadata)._asdict()) - self.assertEqual(rexpected, rmetadata.difference(lmetadata)._asdict()) + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected -class Test_equal(tests.IrisTest): - def setUp(self): +class Test_equal: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = CoordMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + assert self.cls.equal.__doc__ == BaseMetadata.equal.__doc__ def test_lenient_service(self): qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() + assert qualname_equal in _LENIENT + assert _LENIENT[qualname_equal] + assert 
_LENIENT[self.cls.equal] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other, lenient=lenient) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 7d51cbfb37..0b4725da42 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -6,13 +6,8 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. -from typing import Any, ClassVar - -import iris.tests as tests # isort:skip - from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel +from typing import Any, ClassVar import pytest @@ -44,14 +39,15 @@ def _make_metadata( ) -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.cell_methods = mock.sentinel.cell_methods +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes + self.cell_methods = mocker.sentinel.cell_methods self.cls = CubeMetadata def test_repr(self): @@ -75,7 +71,7 @@ def test_repr(self): self.attributes, self.cell_methods, ) - self.assertEqual(expected, repr(metadata)) + assert repr(metadata) == expected def test__fields(self): expected = ( @@ -86,10 +82,10 @@ def test__fields(self): "attributes", "cell_methods", ) - self.assertEqual(self.cls._fields, expected) + assert self.cls._fields == expected def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) + assert issubclass(self.cls, BaseMetadata) @pytest.fixture(params=CubeMetadata._fields) # type: ignore[attr-defined] @@ -99,7 +95,7 @@ def fieldname(request): @pytest.fixture(params=["strict", "lenient"]) -def op_leniency(request): +def leniency(request): """Parametrize testing over strict or lenient operation.""" return request.param @@ -355,7 +351,7 @@ class MixinSplitattrsMatrixTests: def test_splitattrs_cases( self, - op_leniency, + leniency, primary_values, primary_is_global_not_local, order_reversed, @@ -370,7 +366,7 @@ def test_splitattrs_cases( * left-to-right or right-to-left operation order. 
""" primary_inputs = primary_values[-2:] - check_is_lenient = {"strict": False, "lenient": True}[op_leniency] + check_is_lenient = {"strict": False, "lenient": True}[leniency] check_splitattrs_testcase( operation_name=self.operation_name, check_is_lenient=check_is_lenient, @@ -393,7 +389,7 @@ def test_splitattrs_cases( ) def test_splitattrs_global_local_independence( self, - op_leniency, + leniency, primary_values, secondary_values, ): @@ -414,7 +410,7 @@ def test_splitattrs_global_local_independence( """ primary_inputs = primary_values[-2:] secondary_inputs = secondary_values[-2:] - check_is_lenient = {"strict": False, "lenient": True}[op_leniency] + check_is_lenient = {"strict": False, "lenient": True}[leniency] check_splitattrs_testcase( operation_name=self.operation_name, check_is_lenient=check_is_lenient, @@ -429,19 +425,19 @@ class Test___eq__(MixinSplitattrsMatrixTests): operation_name = "equal" @pytest.fixture(autouse=True) - def setup(self): + def _setup(self, mocker): self.lvalues = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, # Must be a mapping. attributes=dict(), - cell_methods=sentinel.cell_methods, + cell_methods=mocker.sentinel.cell_methods, ) # Setup another values tuple with all-distinct content objects. self.rvalues = deepcopy(self.lvalues) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CubeMetadata def test_wraps_docstring(self): @@ -453,37 +449,35 @@ def test_lenient_service(self): assert _LENIENT[qualname___eq__] assert _LENIENT[self.cls.__eq__] - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value + def test_call(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) + patcher = mocker.patch.object(BaseMetadata, "__eq__", return_value=return_value) + result = metadata.__eq__(other) - assert return_value == result - assert mocker.call_args_list == [mock.call(other)] + assert result == return_value + assert patcher.call_args_list == [mocker.call(other)] - def test_op_same(self, op_leniency): + def test_op_same(self, leniency, mocker): # Check op all-same content, but all-new data. # NOTE: test for both strict/lenient, should both work the same. - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) - assert rmetadata.__eq__(lmetadata) + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) + assert rmetadata.__eq__(lmetadata) - def test_op_different__none(self, fieldname, op_leniency): + def test_op_different__none(self, fieldname, leniency, mocker): # One side has field=value, and the other field=None, both strict + lenient. if fieldname == "attributes": # Must be a dict, cannot be None. 
pytest.skip() else: - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) self.rvalues.update({fieldname: None}) rmetadata = self.cls(**self.rvalues) @@ -497,18 +491,18 @@ def test_op_different__none(self, fieldname, op_leniency): # Ensure we are handling all the different field cases raise ValueError(f"{self.__name__} unhandled fieldname : {fieldname}") - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) == expect_success - assert rmetadata.__eq__(lmetadata) == expect_success + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) == expect_success + assert rmetadata.__eq__(lmetadata) == expect_success - def test_op_different__value(self, fieldname, op_leniency): + def test_op_different__value(self, fieldname, leniency, mocker): # Compare when a given field value is changed, both strict + lenient. if fieldname == "attributes": # Dicts have more possibilities: handled separately. pytest.skip() else: - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) self.rvalues.update({fieldname: self.dummy}) rmetadata = self.cls(**self.rvalues) @@ -527,39 +521,40 @@ def test_op_different__value(self, fieldname, op_leniency): # Ensure we are handling all the different field cases raise ValueError(f"{self.__name__} unhandled fieldname : {fieldname}") - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) == expect_success - assert rmetadata.__eq__(lmetadata) == expect_success + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) == expect_success + assert rmetadata.__eq__(lmetadata) == expect_success - def test_op_different__attribute_extra(self, op_leniency): + def test_op_different__attribute_extra(self, leniency, mocker): # Check when one set of attributes has an extra entry. - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) self.rvalues["attributes"]["_extra_"] = 1 rmetadata = self.cls(**self.rvalues) # This counts as equal *only* in the lenient case. expect_success = is_lenient - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check equality both l==r and r==l. - assert lmetadata.__eq__(rmetadata) == expect_success - assert rmetadata.__eq__(lmetadata) == expect_success + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check equality both l==r and r==l. + assert lmetadata.__eq__(rmetadata) == expect_success + assert rmetadata.__eq__(lmetadata) == expect_success - def test_op_different__attribute_value(self, op_leniency): + def test_op_different__attribute_value(self, leniency, mocker): # lhs and rhs have different values for an attribute, both strict + lenient. 
- is_lenient = op_leniency == "lenient" - self.lvalues["attributes"]["_extra_"] = mock.sentinel.value1 - self.rvalues["attributes"]["_extra_"] = mock.sentinel.value2 + is_lenient = leniency == "lenient" + self.lvalues["attributes"]["_extra_"] = mocker.sentinel.value1 + self.rvalues["attributes"]["_extra_"] = mocker.sentinel.value2 lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # This should ALWAYS fail. - assert not lmetadata.__eq__(rmetadata) - assert not rmetadata.__eq__(lmetadata) + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # This should ALWAYS fail. + assert not lmetadata.__eq__(rmetadata) + assert not rmetadata.__eq__(lmetadata) -class Test___lt__(tests.IrisTest): - def setUp(self): +class Test___lt__: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = CubeMetadata self.one = self.cls(1, 1, 1, 1, 1, 1) self.two = self.cls(1, 1, 1, 2, 1, 1) @@ -568,43 +563,43 @@ def setUp(self): def test__ascending_lt(self): result = self.one < self.two - self.assertTrue(result) + assert result def test__descending_lt(self): result = self.two < self.one - self.assertFalse(result) + assert not result def test__none_rhs_operand(self): result = self.one < self.none - self.assertFalse(result) + assert not result def test__none_lhs_operand(self): result = self.none < self.one - self.assertTrue(result) + assert result def test__ignore_attributes_cell_methods(self): result = self.one < self.attributes_cm - self.assertFalse(result) + assert not result result = self.attributes_cm < self.one - self.assertFalse(result) + assert not result class Test_combine(MixinSplitattrsMatrixTests): operation_name = "combine" @pytest.fixture(autouse=True) - def setup(self): + def _setup(self, mocker): self.lvalues = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - cell_methods=sentinel.cell_methods, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + cell_methods=mocker.sentinel.cell_methods, ) # Get a second copy with all-new objects. 
self.rvalues = deepcopy(self.lvalues) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) @@ -617,48 +612,48 @@ def test_lenient_service(self): assert _LENIENT[qualname_combine] assert _LENIENT[self.cls.combine] - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) + ) + result = self.none.combine(other) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=None)] + assert result == return_value + assert patcher.call_args_list == [mocker.call(other, lenient=None)] - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) + ) + result = self.none.combine(other, lenient=lenient) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=lenient)] + assert result == return_value + assert patcher.call_args_list == [mocker.call(other, lenient=lenient)] - def test_op_same(self, op_leniency): + def test_op_same(self, leniency, mocker): # Result is same as either input, both strict + lenient. - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) expected = self.lvalues - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_different__none(self, fieldname, op_leniency): + def test_op_different__none(self, fieldname, leniency, mocker): # One side has field=value, and the other field=None, both strict + lenient. if fieldname == "attributes": # Can't be None : Tested separately pytest.skip() - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) # Cancel one setting in the rhs argument. 
@@ -682,21 +677,21 @@ def test_op_different__none(self, fieldname, op_leniency): # also include those which only 1 has expected = self.lvalues - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_different__value(self, fieldname, op_leniency): + def test_op_different__value(self, fieldname, leniency, mocker): # One field has different value for lhs/rhs, both strict + lenient. if fieldname == "attributes": # Attribute behaviours are tested separately pytest.skip() - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" - self.lvalues[fieldname] = mock.sentinel.value1 - self.rvalues[fieldname] = mock.sentinel.value2 + self.lvalues[fieldname] = mocker.sentinel.value1 + self.rvalues[fieldname] = mocker.sentinel.value2 lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) @@ -704,18 +699,18 @@ def test_op_different__value(self, fieldname, op_leniency): expected = self.lvalues.copy() expected[fieldname] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_different__attribute_extra(self, op_leniency): + def test_op_different__attribute_extra(self, leniency, mocker): # One field has an extra attribute, both strict + lenient. - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" - self.lvalues["attributes"] = {"_a_common_": mock.sentinel.dummy} + self.lvalues["attributes"] = {"_a_common_": mocker.sentinel.dummy} self.rvalues["attributes"] = self.lvalues["attributes"].copy() - self.rvalues["attributes"]["_extra_"] = mock.sentinel.testvalue + self.rvalues["attributes"]["_extra_"] = mocker.sentinel.testvalue lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) @@ -726,22 +721,22 @@ def test_op_different__attribute_extra(self, op_leniency): # .. it should not expected = self.lvalues - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected - def test_op_different__attribute_value(self, op_leniency): + def test_op_different__attribute_value(self, leniency, mocker): # lhs and rhs have different values for an attribute, both strict + lenient. 
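# --- Editor's illustrative sketch; not part of the patch. The "fieldname" and "leniency"
# arguments used above are parametrised fixtures, so each test body runs once per
# parameter and pytest.skip() opts a single combination out. The FIELDS tuple below is
# made up; requires only pytest:
import pytest

FIELDS = ("standard_name", "long_name", "attributes")

@pytest.fixture(params=FIELDS)
def fieldname(request):
    return request.param

def test_default_is_none(fieldname):
    if fieldname == "attributes":
        # The mapping-valued field is exercised by dedicated tests instead.
        pytest.skip()
    defaults = dict.fromkeys(FIELDS)
    assert defaults[fieldname] is None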
- is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" self.lvalues["attributes"] = { "_a_common_": self.dummy, - "_b_common_": mock.sentinel.value1, + "_b_common_": mocker.sentinel.value1, } self.lvalues["attributes"] = { "_a_common_": self.dummy, - "_b_common_": mock.sentinel.value2, + "_b_common_": mocker.sentinel.value2, } lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) @@ -751,28 +746,28 @@ def test_op_different__attribute_value(self, op_leniency): expected = self.lvalues.copy() expected["attributes"] = None - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # Check both l+r and r+l - assert lmetadata.combine(rmetadata)._asdict() == expected - assert rmetadata.combine(lmetadata)._asdict() == expected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # Check both l+r and r+l + assert lmetadata.combine(rmetadata)._asdict() == expected + assert rmetadata.combine(lmetadata)._asdict() == expected class Test_difference(MixinSplitattrsMatrixTests): operation_name = "difference" @pytest.fixture(autouse=True) - def setup(self): + def _setup(self, mocker): self.lvalues = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, attributes=dict(), # MUST be a dict - cell_methods=sentinel.cell_methods, + cell_methods=mocker.sentinel.cell_methods, ) # Make a copy with all-different objects in it. self.rvalues = deepcopy(self.lvalues) - self.dummy = sentinel.dummy + self.dummy = mocker.sentinel.dummy self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) @@ -785,45 +780,45 @@ def test_lenient_service(self): assert _LENIENT[qualname_difference] assert _LENIENT[self.cls.difference] - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) + ) + result = self.none.difference(other) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=None)] + assert result == return_value + assert patcher.call_args_list == [mocker.call(other, lenient=None)] - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object( BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) + ) + result = self.none.difference(other, lenient=lenient) - assert return_value == result - assert mocker.call_args_list == [mock.call(other, lenient=lenient)] + assert result == return_value + assert patcher.call_args_list == [mocker.call(other, lenient=lenient)] - def test_op_same(self, op_leniency): - is_lenient = op_leniency == "lenient" + def test_op_same(self, leniency, mocker): + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) - with 
mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - assert lmetadata.difference(rmetadata) is None - assert rmetadata.difference(lmetadata) is None + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_different__none(self, fieldname, op_leniency): + def test_op_different__none(self, fieldname, leniency, mocker): # One side has field=value, and the other field=None, both strict + lenient. if fieldname in ("attributes",): # These cannot properly be set to 'None'. Tested elsewhere. pytest.skip() - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" lmetadata = self.cls(**self.lvalues) self.rvalues[fieldname] = None @@ -848,28 +843,28 @@ def test_op_different__none(self, fieldname, op_leniency): rexpected = lexpected.copy() rexpected[fieldname] = diffentry[::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - if strict_result: - assert lmetadata.difference(rmetadata)._asdict() == lexpected - assert rmetadata.difference(lmetadata)._asdict() == rexpected - else: - # Expect NO differences - assert lmetadata.difference(rmetadata) is None - assert rmetadata.difference(lmetadata) is None + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + if strict_result: + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected + else: + # Expect NO differences + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None - def test_op_different__value(self, fieldname, op_leniency): + def test_op_different__value(self, fieldname, leniency, mocker): # One field has different value for lhs/rhs, both strict + lenient. if fieldname == "attributes": # Attribute behaviours are tested separately pytest.skip() - self.lvalues[fieldname] = mock.sentinel.value1 - self.rvalues[fieldname] = mock.sentinel.value2 + self.lvalues[fieldname] = mocker.sentinel.value1 + self.rvalues[fieldname] = mocker.sentinel.value2 lmetadata = self.cls(**self.lvalues) rmetadata = self.cls(**self.rvalues) # In all cases, this field should show a difference : leniency has no effect - ldiff_values = (mock.sentinel.value1, mock.sentinel.value2) + ldiff_values = (mocker.sentinel.value1, mocker.sentinel.value2) ldiff_metadata = self.none._asdict() ldiff_metadata[fieldname] = ldiff_values rdiff_metadata = self.none._asdict() @@ -879,52 +874,52 @@ def test_op_different__value(self, fieldname, op_leniency): assert lmetadata.difference(rmetadata)._asdict() == ldiff_metadata assert rmetadata.difference(lmetadata)._asdict() == rdiff_metadata - def test_op_different__attribute_extra(self, op_leniency): + def test_op_different__attribute_extra(self, leniency, mocker): # One field has an extra attribute, both strict + lenient. 
- is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" self.lvalues["attributes"] = {"_a_common_": self.dummy} lmetadata = self.cls(**self.lvalues) rvalues = deepcopy(self.lvalues) - rvalues["attributes"]["_b_extra_"] = mock.sentinel.extra + rvalues["attributes"]["_b_extra_"] = mocker.sentinel.extra rmetadata = self.cls(**rvalues) if not is_lenient: # In this case, attributes returns a "difference dictionary" - diffentry = tuple([{}, {"_b_extra_": mock.sentinel.extra}]) + diffentry = tuple([{}, {"_b_extra_": mocker.sentinel.extra}]) lexpected = self.none._asdict() lexpected["attributes"] = diffentry rexpected = lexpected.copy() rexpected["attributes"] = diffentry[::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - if is_lenient: - # It recognises no difference - assert lmetadata.difference(rmetadata) is None - assert rmetadata.difference(lmetadata) is None - else: - # As calculated above - assert lmetadata.difference(rmetadata)._asdict() == lexpected - assert rmetadata.difference(lmetadata)._asdict() == rexpected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + if is_lenient: + # It recognises no difference + assert lmetadata.difference(rmetadata) is None + assert rmetadata.difference(lmetadata) is None + else: + # As calculated above + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected - def test_op_different__attribute_value(self, op_leniency): + def test_op_different__attribute_value(self, leniency, mocker): # lhs and rhs have different values for an attribute, both strict + lenient. - is_lenient = op_leniency == "lenient" + is_lenient = leniency == "lenient" self.lvalues["attributes"] = { "_a_common_": self.dummy, - "_b_extra_": mock.sentinel.value1, + "_b_extra_": mocker.sentinel.value1, } lmetadata = self.cls(**self.lvalues) self.rvalues["attributes"] = { "_a_common_": self.dummy, - "_b_extra_": mock.sentinel.value2, + "_b_extra_": mocker.sentinel.value2, } rmetadata = self.cls(**self.rvalues) # In this case, attributes returns a "difference dictionary" diffentry = tuple( [ - {"_b_extra_": mock.sentinel.value1}, - {"_b_extra_": mock.sentinel.value2}, + {"_b_extra_": mocker.sentinel.value1}, + {"_b_extra_": mocker.sentinel.value2}, ] ) lexpected = self.none._asdict() @@ -932,190 +927,184 @@ def test_op_different__attribute_value(self, op_leniency): rexpected = lexpected.copy() rexpected["attributes"] = diffentry[::-1] - with mock.patch("iris.common.metadata._LENIENT", return_value=is_lenient): - # As calculated above -- same for both strict + lenient - assert lmetadata.difference(rmetadata)._asdict() == lexpected - assert rmetadata.difference(lmetadata)._asdict() == rexpected + mocker.patch("iris.common.metadata._LENIENT", return_value=is_lenient) + # As calculated above -- same for both strict + lenient + assert lmetadata.difference(rmetadata)._asdict() == lexpected + assert rmetadata.difference(lmetadata)._asdict() == rexpected -class Test_equal(tests.IrisTest): - def setUp(self): +class Test_equal: + @pytest.fixture(autouse=True) + def _setup(self): self.cls = CubeMetadata self.none = self.cls(*(None,) * len(self.cls._fields)) def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + assert BaseMetadata.equal.__doc__ == self.cls.equal.__doc__ def test_lenient_service(self): qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - 
self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -class Test_name(tests.IrisTest): - def setUp(self): + assert qualname_equal in _LENIENT + assert _LENIENT[qualname_equal] + assert _LENIENT[self.cls.equal] + + def test_lenient_default(self, mocker): + other = mocker.sentinel.other + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=None) + + def test_lenient(self, mocker): + other = mocker.sentinel.other + lenient = mocker.sentinel.lenient + return_value = mocker.sentinel.return_value + patcher = mocker.patch.object(BaseMetadata, "equal", return_value=return_value) + result = self.none.equal(other, lenient=lenient) + + assert result == return_value + assert patcher.call_count == 1 + (arg,), kwargs = patcher.call_args + assert arg == other + assert kwargs == dict(lenient=lenient) + + +class Test_name: + @pytest.fixture(autouse=True) + def _setup(self): self.default = CubeMetadata.DEFAULT_NAME def test_standard_name(self): token = "standard_name" metadata = _make_metadata(standard_name=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, token) + assert result == token def test_standard_name__invalid_token(self): token = "nope nope" metadata = _make_metadata(standard_name=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, self.default) + assert result == self.default def test_long_name(self): token = "long_name" metadata = _make_metadata(long_name=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, token) + assert result == token def test_long_name__invalid_token(self): token = "nope nope" metadata = _make_metadata(long_name=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, self.default) + assert result == self.default def test_var_name(self): token = "var_name" metadata = _make_metadata(var_name=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, token) + assert result == token def test_var_name__invalid_token(self): 
token = "nope nope" metadata = _make_metadata(var_name=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, self.default) + assert result == self.default def test_attributes(self): token = "stash" metadata = _make_metadata(attributes=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, token) + assert result == token def test_attributes__invalid_token(self): token = "nope nope" metadata = _make_metadata(attributes=token) result = metadata.name() - self.assertEqual(result, token) + assert result == token result = metadata.name(token=True) - self.assertEqual(result, self.default) + assert result == self.default def test_attributes__non_mapping(self): metadata = _make_metadata(force_mapping=False) - self.assertIsNone(metadata.attributes) + assert metadata.attributes is None emsg = "Invalid 'CubeMetadata.attributes' member, must be a mapping." - with self.assertRaisesRegex(AttributeError, emsg): + with pytest.raises(AttributeError, match=emsg): _ = metadata.name() def test_default(self): metadata = _make_metadata() result = metadata.name() - self.assertEqual(result, self.default) + assert result == self.default result = metadata.name(token=True) - self.assertEqual(result, self.default) + assert result == self.default def test_default__invalid_token(self): token = "nope nope" metadata = _make_metadata() result = metadata.name(default=token) - self.assertEqual(result, token) + assert result == token emsg = "Cannot retrieve a valid name token" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = metadata.name(default=token, token=True) -class Test__names(tests.IrisTest): +class Test__names: def test_standard_name(self): token = "standard_name" metadata = _make_metadata(standard_name=token) expected = (token, None, None, None) result = metadata._names - self.assertEqual(expected, result) + assert result == expected def test_long_name(self): token = "long_name" metadata = _make_metadata(long_name=token) expected = (None, token, None, None) result = metadata._names - self.assertEqual(expected, result) + assert result == expected def test_var_name(self): token = "var_name" metadata = _make_metadata(var_name=token) expected = (None, None, token, None) result = metadata._names - self.assertEqual(expected, result) + assert result == expected def test_attributes(self): token = "stash" metadata = _make_metadata(attributes=token) expected = (None, None, None, token) result = metadata._names - self.assertEqual(expected, result) + assert result == expected def test_attributes__non_mapping(self): metadata = _make_metadata(force_mapping=False) - self.assertIsNone(metadata.attributes) + assert metadata.attributes is None emsg = "Invalid 'CubeMetadata.attributes' member, must be a mapping." 
- with self.assertRaisesRegex(AttributeError, emsg): + with pytest.raises(AttributeError, match=emsg): _ = metadata._names - def test_None(self): + def test_none(self): metadata = _make_metadata() expected = (None, None, None, None) result = metadata._names - self.assertEqual(expected, result) - - -if __name__ == "__main__": - tests.main() + assert result == expected diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py index 63d700e53e..426bf6416a 100644 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py @@ -4,16 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from abc import abstractmethod +import pytest + from iris.common.metadata import _NamedTupleMeta -class Test(tests.IrisTest): +class Test: @staticmethod def names(classes): return [cls.__name__ for cls in classes] @@ -42,11 +40,11 @@ def _members(self): pass expected = ["object"] - self.assertEqual(self.names(Metadata.__bases__), expected) + assert self.names(Metadata.__bases__) == expected expected = ["Metadata", "object"] - self.assertEqual(self.names(Metadata.__mro__), expected) + assert self.names(Metadata.__mro__) == expected emsg = "Can't instantiate abstract class" - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = Metadata() def test__no_bases_single_member(self): @@ -56,15 +54,15 @@ class Metadata(metaclass=_NamedTupleMeta): _members = member expected = ["MetadataNamedtuple"] - self.assertEqual(self.names(Metadata.__bases__), expected) + assert self.names(Metadata.__bases__) == expected expected = ["Metadata", "MetadataNamedtuple", "tuple", "object"] - self.assertEqual(self.names(Metadata.__mro__), expected) + assert self.names(Metadata.__mro__) == expected emsg = self.emsg_generate(member) - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = Metadata() metadata = Metadata(1) - self.assertEqual(metadata._fields, (member,)) - self.assertEqual(metadata.arg_one, 1) + assert metadata._fields == (member,) + assert metadata.arg_one == 1 def test__no_bases_multiple_members(self): members = ("arg_one", "arg_two") @@ -73,17 +71,17 @@ class Metadata(metaclass=_NamedTupleMeta): _members = members expected = ["MetadataNamedtuple"] - self.assertEqual(self.names(Metadata.__bases__), expected) + assert self.names(Metadata.__bases__) == expected expected = ["Metadata", "MetadataNamedtuple", "tuple", "object"] - self.assertEqual(self.names(Metadata.__mro__), expected) + assert self.names(Metadata.__mro__) == expected emsg = self.emsg_generate(members) - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = Metadata() values = range(len(members)) metadata = Metadata(*values) - self.assertEqual(metadata._fields, members) + assert metadata._fields == members expected = dict(zip(members, values)) - self.assertEqual(metadata._asdict(), expected) + assert metadata._asdict() == expected def test__multiple_bases_multiple_members(self): members_parent = ("arg_one", "arg_two") @@ -97,26 +95,26 @@ class MetadataChild(MetadataParent): # Check the parent class... 
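# --- Editor's illustrative sketch; not part of the patch. pytest.raises(..., match=...)
# is the direct replacement for assertRaisesRegex used above: "match" is a regular
# expression searched (not fully matched) against str(exception), so a message prefix
# is enough. Stand-alone example, requires only pytest:
import pytest

def test_error_message_is_matched():
    emsg = "invalid literal for int"
    with pytest.raises(ValueError, match=emsg):
        int("not a number")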
expected = ["MetadataParentNamedtuple"] - self.assertEqual(self.names(MetadataParent.__bases__), expected) + assert self.names(MetadataParent.__bases__) == expected expected = [ "MetadataParent", "MetadataParentNamedtuple", "tuple", "object", ] - self.assertEqual(self.names(MetadataParent.__mro__), expected) + assert self.names(MetadataParent.__mro__) == expected emsg = self.emsg_generate(members_parent) - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = MetadataParent() values_parent = range(len(members_parent)) metadata_parent = MetadataParent(*values_parent) - self.assertEqual(metadata_parent._fields, members_parent) + assert metadata_parent._fields == members_parent expected = dict(zip(members_parent, values_parent)) - self.assertEqual(metadata_parent._asdict(), expected) + assert metadata_parent._asdict() == expected # Check the dependent child class... expected = ["MetadataChildNamedtuple", "MetadataParent"] - self.assertEqual(self.names(MetadataChild.__bases__), expected) + assert self.names(MetadataChild.__bases__) == expected expected = [ "MetadataChild", "MetadataChildNamedtuple", @@ -125,17 +123,13 @@ class MetadataChild(MetadataParent): "tuple", "object", ] - self.assertEqual(self.names(MetadataChild.__mro__), expected) + assert self.names(MetadataChild.__mro__) == expected emsg = self.emsg_generate((*members_parent, *members_child)) - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = MetadataChild() fields_child = (*members_parent, *members_child) values_child = range(len(fields_child)) metadata_child = MetadataChild(*values_child) - self.assertEqual(metadata_child._fields, fields_child) + assert metadata_child._fields == fields_child expected = dict(zip(fields_child, values_child)) - self.assertEqual(metadata_child._asdict(), expected) - - -if __name__ == "__main__": - tests.main() + assert metadata_child._asdict() == expected diff --git a/lib/iris/tests/unit/common/metadata/test_hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py index 1a0a0e0120..0ce0dcd706 100644 --- a/lib/iris/tests/unit/common/metadata/test_hexdigest.py +++ b/lib/iris/tests/unit/common/metadata/test_hexdigest.py @@ -4,21 +4,17 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.common.metadata.hexdigest`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np import numpy.ma as ma +import pytest from xxhash import xxh64, xxh64_hexdigest from iris.common.metadata import hexdigest -class TestBytesLikeObject(tests.IrisTest): - def setUp(self): +class TestBytesLikeObject: + @pytest.fixture(autouse=True) + def _setup(self): self.hasher = xxh64() self.hasher.reset() @@ -42,77 +38,77 @@ def test_string(self): value = "hello world" self.hasher.update(value) expected = self.hasher.hexdigest() - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_numpy_array_int(self): value = np.arange(10, dtype=np.int_) expected = self._ndarray(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_numpy_array_float(self): value = np.arange(10, dtype=np.float64) expected = self._ndarray(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_numpy_array_float_not_int(self): ivalue = np.arange(10, dtype=np.int_) fvalue = np.arange(10, dtype=np.float64) expected = self._ndarray(ivalue) - self.assertNotEqual(expected, hexdigest(fvalue)) + assert hexdigest(fvalue) != expected def test_numpy_array_reshape(self): value = np.arange(10).reshape(2, 5) expected = self._ndarray(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_numpy_array_reshape_not_flat(self): value = np.arange(10).reshape(2, 5) expected = self._ndarray(value) - self.assertNotEqual(expected, hexdigest(value.flatten())) + assert hexdigest(value.flatten()) != expected def test_masked_array_int(self): value = ma.arange(10, dtype=np.int_) expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected value[0] = ma.masked - self.assertNotEqual(expected, hexdigest(value)) + assert hexdigest(value) != expected expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_masked_array_float(self): value = ma.arange(10, dtype=np.float64) expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected value[0] = ma.masked - self.assertNotEqual(expected, hexdigest(value)) + assert hexdigest(value) != expected expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_masked_array_float_not_int(self): ivalue = ma.arange(10, dtype=np.int_) fvalue = ma.arange(10, dtype=np.float64) expected = self._masked(ivalue) - self.assertNotEqual(expected, hexdigest(fvalue)) + assert hexdigest(fvalue) != expected def test_masked_array_not_array(self): value = ma.arange(10) expected = self._masked(value) - self.assertNotEqual(expected, hexdigest(value.data)) + assert hexdigest(value.data) != expected def test_masked_array_reshape(self): value = ma.arange(10).reshape(2, 5) expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_masked_array_reshape_not_flat(self): value = ma.arange(10).reshape(2, 5) expected = self._masked(value) - self.assertNotEqual(expected, hexdigest(value.flatten())) + assert hexdigest(value.flatten()) != expected -class TestNotBytesLikeObject(tests.IrisTest): +class TestNotBytesLikeObject: def _expected(self, value): parts = str((type(value), value)) return xxh64_hexdigest(parts) @@ -120,42 +116,42 @@ def _expected(self, value): def test_int(self): 
value = 123 expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_numpy_int(self): value = int(123) expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_float(self): value = 123.4 expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_numpy_float(self): value = float(123.4) expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_list(self): value = [1, 2, 3] expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_tuple(self): value = (1, 2, 3) expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_dict(self): value = dict(one=1, two=2, three=3) expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected - def test_sentinel(self): - value = mock.sentinel.value + def test_sentinel(self, mocker): + value = mocker.sentinel.value expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_instance(self): class Dummy: @@ -163,13 +159,9 @@ class Dummy: value = Dummy() expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) + assert hexdigest(value) == expected def test_int_not_str(self): value = 123 expected = self._expected(value) - self.assertNotEqual(expected, hexdigest(str(value))) - - -if __name__ == "__main__": - tests.main() + assert hexdigest(str(value)) != expected diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py index 586a5fe5f8..792e1b83b4 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -5,120 +5,113 @@ """Unit tests for the :func:`iris.common.metadata_filter`.""" import numpy as np +import pytest from iris.common.metadata import CoordMetadata, DimCoordMetadata, metadata_filter from iris.coords import AuxCoord -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests -Mock = tests.mock.Mock - - -class Test_standard(tests.IrisTest): - def test_instances_non_iterable(self): - item = Mock() +class Test_standard: + def test_instances_non_iterable(self, mocker): + item = mocker.Mock() item.name.return_value = "one" result = metadata_filter(item, item="one") - self.assertEqual(1, len(result)) - self.assertIn(item, result) + assert len(result) == 1 + assert item in result - def test_name(self): - name_one = Mock() + def test_name(self, mocker): + name_one = mocker.Mock() name_one.name.return_value = "one" - name_two = Mock() + name_two = mocker.Mock() name_two.name.return_value = "two" input_list = [name_one, name_two] result = metadata_filter(input_list, item="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) + assert name_one in result + assert name_two not in result - def test_item(self): - coord = Mock(__class__=AuxCoord) - mock = Mock() + def test_item(self, mocker): + coord = mocker.Mock(__class__=AuxCoord) + mock = mocker.Mock() input_list = [coord, mock] result = metadata_filter(input_list, item=coord) - self.assertIn(coord, result) - self.assertNotIn(mock, result) + assert coord in result + assert mock not in result - def test_item_metadata(self): - coord = Mock(metadata=CoordMetadata) - dim_coord = Mock(metadata=DimCoordMetadata) + def test_item_metadata(self, mocker): + coord = mocker.Mock(metadata=CoordMetadata) + dim_coord = mocker.Mock(metadata=DimCoordMetadata) input_list = [coord, dim_coord] result = metadata_filter(input_list, item=coord) - self.assertIn(coord, result) - self.assertNotIn(dim_coord, result) + assert coord in result + assert dim_coord not in result - def test_standard_name(self): - name_one = Mock(standard_name="one") - name_two = Mock(standard_name="two") + def test_standard_name(self, mocker): + name_one = mocker.Mock(standard_name="one") + name_two = mocker.Mock(standard_name="two") input_list = [name_one, name_two] result = metadata_filter(input_list, standard_name="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) + assert name_one in result + assert name_two not in result - def test_long_name(self): - name_one = Mock(long_name="one") - name_two = Mock(long_name="two") + def test_long_name(self, mocker): + name_one = mocker.Mock(long_name="one") + name_two = mocker.Mock(long_name="two") input_list = [name_one, name_two] result = metadata_filter(input_list, long_name="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) + assert name_one in result + assert name_two not in result - def test_var_name(self): - name_one = Mock(var_name="one") - name_two = Mock(var_name="two") + def test_var_name(self, mocker): + name_one = mocker.Mock(var_name="one") + name_two = mocker.Mock(var_name="two") input_list = [name_one, name_two] result = metadata_filter(input_list, var_name="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) + assert name_one in result + assert name_two not in result - def test_attributes(self): + def test_attributes(self, mocker): # Confirm that this can handle attrib dicts including np arrays. 
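# --- Editor's illustrative sketch; not part of the patch. mocker.Mock is the same class
# as unittest.mock.Mock; attributes are configured via keywords, and "del" removes one so
# that later access raises AttributeError -- the trick the axis-guessing test above relies
# on. Attribute names here are made up; requires pytest-mock:
def test_mock_attribute_deletion(mocker):
    coord = mocker.Mock(standard_name="longitude")
    del coord.axis  # now the mock behaves as if it has no "axis" attribute

    assert coord.standard_name == "longitude"
    assert not hasattr(coord, "axis")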
- attrib_one_two = Mock(attributes={"one": np.arange(1), "two": np.arange(2)}) - attrib_three_four = Mock( + attrib_one_two = mocker.Mock( + attributes={"one": np.arange(1), "two": np.arange(2)} + ) + attrib_three_four = mocker.Mock( attributes={"three": np.arange(3), "four": np.arange(4)} ) input_list = [attrib_one_two, attrib_three_four] result = metadata_filter(input_list, attributes=attrib_one_two.attributes) - self.assertIn(attrib_one_two, result) - self.assertNotIn(attrib_three_four, result) + assert attrib_one_two in result + assert attrib_three_four not in result - def test_invalid_attributes(self): - attrib_one = Mock(attributes={"one": 1}) + def test_invalid_attributes(self, mocker): + attrib_one = mocker.Mock(attributes={"one": 1}) input_list = [attrib_one] - self.assertRaisesRegex( - ValueError, - ".*expecting a dictionary.*", - metadata_filter, - input_list, - attributes="one", - ) + emsg = ".*expecting a dictionary.*" + with pytest.raises(ValueError, match=emsg): + _ = metadata_filter(input_list, attributes="one") - def test_axis__by_guess(self): + def test_axis__by_guess(self, mocker): # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes - axis_lon = Mock(standard_name="longitude") + axis_lon = mocker.Mock(standard_name="longitude") del axis_lon.axis - axis_lat = Mock(standard_name="latitude") + axis_lat = mocker.Mock(standard_name="latitude") del axis_lat.axis input_list = [axis_lon, axis_lat] result = metadata_filter(input_list, axis="x") - self.assertIn(axis_lon, result) - self.assertNotIn(axis_lat, result) + assert axis_lon in result + assert axis_lat not in result - def test_axis__by_member(self): - axis_x = Mock(axis="x") - axis_y = Mock(axis="y") + def test_axis__by_member(self, mocker): + axis_x = mocker.Mock(axis="x") + axis_y = mocker.Mock(axis="y") input_list = [axis_x, axis_y] result = metadata_filter(input_list, axis="x") - self.assertEqual(1, len(result)) - self.assertIn(axis_x, result) + assert len(result) == 1 + assert axis_x in result - def test_multiple_args(self): - coord_one = Mock(__class__=AuxCoord, long_name="one") - coord_two = Mock(__class__=AuxCoord, long_name="two") + def test_multiple_args(self, mocker): + coord_one = mocker.Mock(__class__=AuxCoord, long_name="one") + coord_two = mocker.Mock(__class__=AuxCoord, long_name="two") input_list = [coord_one, coord_two] result = metadata_filter(input_list, item=coord_one, long_name="one") - self.assertIn(coord_one, result) - self.assertNotIn(coord_two, result) + assert coord_one in result + assert coord_two not in result diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index 1fbf0da084..ab1a8eeb6c 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -4,14 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import pickle -import unittest.mock as mock from cf_units import Unit +import pytest from iris.common.metadata import ( AncillaryVariableMetadata, @@ -33,15 +29,16 @@ ] -class Test_factory(tests.IrisTest): +class Test_factory: def test__kwargs_invalid(self): emsg = "Invalid 'BaseMetadata' field parameters, got 'wibble'." 
- with self.assertRaisesRegex(ValueError, emsg): - metadata_manager_factory(BaseMetadata, wibble="nope") + with pytest.raises(ValueError, match=emsg): + _ = metadata_manager_factory(BaseMetadata, wibble="nope") -class Test_instance(tests.IrisTest): - def setUp(self): +class Test_instance: + @pytest.fixture(autouse=True) + def _setup(self): self.bases = BASES def test__namespace(self): @@ -62,70 +59,72 @@ def test__namespace(self): for base in self.bases: metadata = metadata_manager_factory(base) for name in namespace: - self.assertTrue(hasattr(metadata, name)) + assert hasattr(metadata, name) if base is CubeMetadata: - self.assertTrue(hasattr(metadata, "_names")) - self.assertIs(metadata.cls, base) + assert hasattr(metadata, "_names") + assert metadata.cls is base def test__kwargs_default(self): for base in self.bases: kwargs = dict(zip(base._fields, [None] * len(base._fields))) metadata = metadata_manager_factory(base) - self.assertEqual(metadata.values._asdict(), kwargs) + assert metadata.values._asdict() == kwargs def test__kwargs(self): for base in self.bases: kwargs = dict(zip(base._fields, range(len(base._fields)))) metadata = metadata_manager_factory(base, **kwargs) - self.assertEqual(metadata.values._asdict(), kwargs) + assert metadata.values._asdict() == kwargs -class Test_instance___eq__(tests.IrisTest): - def setUp(self): +class Test_instance___eq__: + @pytest.fixture(autouse=True) + def _setup(self): self.metadata = metadata_manager_factory(BaseMetadata) def test__not_implemented(self): - self.assertNotEqual(self.metadata, 1) + assert self.metadata != 1 def test__not_is_cls(self): base = BaseMetadata other = metadata_manager_factory(base) - self.assertIs(other.cls, base) + assert other.cls is base other.cls = CoordMetadata - self.assertNotEqual(self.metadata, other) + assert other != self.metadata - def test__not_values(self): - standard_name = mock.sentinel.standard_name + def test__not_values(self, mocker): + standard_name = mocker.sentinel.standard_name other = metadata_manager_factory(BaseMetadata, standard_name=standard_name) - self.assertEqual(other.standard_name, standard_name) - self.assertIsNone(other.long_name) - self.assertIsNone(other.var_name) - self.assertIsNone(other.units) - self.assertIsNone(other.attributes) - self.assertNotEqual(self.metadata, other) + assert other.standard_name == standard_name + assert other.long_name is None + assert other.var_name is None + assert other.units is None + assert other.attributes is None + assert other != self.metadata def test__same_default(self): other = metadata_manager_factory(BaseMetadata) - self.assertEqual(self.metadata, other) + assert other == self.metadata def test__same(self): kwargs = dict(standard_name=1, long_name=2, var_name=3, units=4, attributes=5) metadata = metadata_manager_factory(BaseMetadata, **kwargs) other = metadata_manager_factory(BaseMetadata, **kwargs) - self.assertEqual(metadata.values._asdict(), kwargs) - self.assertEqual(metadata, other) + assert metadata.values._asdict() == kwargs + assert metadata == other -class Test_instance____repr__(tests.IrisTest): - def setUp(self): +class Test_instance____repr__: + @pytest.fixture(autouse=True) + def _setup(self): self.metadata = metadata_manager_factory(BaseMetadata) - def test(self): - standard_name = mock.sentinel.standard_name - long_name = mock.sentinel.long_name - var_name = mock.sentinel.var_name - units = mock.sentinel.units - attributes = mock.sentinel.attributes + def test(self, mocker): + standard_name = mocker.sentinel.standard_name + 
long_name = mocker.sentinel.long_name + var_name = mocker.sentinel.var_name + units = mocker.sentinel.units + attributes = mocker.sentinel.attributes values = (standard_name, long_name, var_name, units, attributes) for field, value in zip(self.metadata.fields, values): @@ -136,11 +135,12 @@ def test(self): "MetadataManager(standard_name={!r}, long_name={!r}, var_name={!r}, " "units={!r}, attributes={!r})" ) - self.assertEqual(result, expected.format(*values)) + assert result == expected.format(*values) -class Test_instance__pickle(tests.IrisTest): - def setUp(self): +class Test_instance__pickle: + @pytest.fixture(autouse=True) + def _setup(self): self.standard_name = "standard_name" self.long_name = "long_name" self.var_name = "var_name" @@ -156,40 +156,38 @@ def setUp(self): kwargs = dict(zip(BaseMetadata._fields, values)) self.metadata = metadata_manager_factory(BaseMetadata, **kwargs) - def test_pickle(self): + def test_pickle(self, tmp_path): for protocol in range(pickle.HIGHEST_PROTOCOL + 1): - with self.temp_filename(suffix=".pkl") as fname: - with open(fname, "wb") as fout: - pickle.dump(self.metadata, fout, protocol=protocol) - with open(fname, "rb") as fin: - metadata = pickle.load(fin) - self.assertEqual(metadata, self.metadata) + fname = tmp_path / f"pickle_{protocol}.pkl" + with open(fname, "wb") as fout: + pickle.dump(self.metadata, fout, protocol=protocol) + with open(fname, "rb") as fin: + metadata = pickle.load(fin) + assert metadata == self.metadata -class Test_instance__fields(tests.IrisTest): - def setUp(self): +class Test_instance__fields: + @pytest.fixture(autouse=True) + def _setup(self): self.bases = BASES def test(self): for base in self.bases: fields = base._fields metadata = metadata_manager_factory(base) - self.assertEqual(metadata.fields, fields) + assert metadata.fields == fields for field in fields: - hasattr(metadata, field) + assert hasattr(metadata, field) -class Test_instance__values(tests.IrisTest): - def setUp(self): +class Test_instance__values: + @pytest.fixture(autouse=True) + def _setup(self): self.bases = BASES def test(self): for base in self.bases: metadata = metadata_manager_factory(base) result = metadata.values - self.assertIsInstance(result, base) - self.assertEqual(result._fields, base._fields) - - -if __name__ == "__main__": - tests.main() + assert isinstance(result, base) + assert result._fields == base._fields diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index 7d414bfb54..c6de740ba1 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -4,14 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.mixin.CFVariableMixin`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from collections import OrderedDict, namedtuple -from unittest import mock from cf_units import Unit +import pytest from iris.common.metadata import ( AncillaryVariableMetadata, @@ -24,16 +20,17 @@ from iris.common.mixin import CFVariableMixin, LimitedAttributeDict -class Test__getter(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.metadata = mock.sentinel.metadata +class Test__getter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes + self.metadata = mocker.sentinel.metadata - metadata = mock.MagicMock( + metadata = mocker.MagicMock( standard_name=self.standard_name, long_name=self.long_name, var_name=self.var_name, @@ -46,32 +43,33 @@ def setUp(self): self.item._metadata_manager = metadata def test_standard_name(self): - self.assertEqual(self.item.standard_name, self.standard_name) + assert self.item.standard_name == self.standard_name def test_long_name(self): - self.assertEqual(self.item.long_name, self.long_name) + assert self.item.long_name == self.long_name def test_var_name(self): - self.assertEqual(self.item.var_name, self.var_name) + assert self.item.var_name == self.var_name def test_units(self): - self.assertEqual(self.item.units, self.units) + assert self.item.units == self.units def test_attributes(self): - self.assertEqual(self.item.attributes, self.attributes) + assert self.item.attributes == self.attributes def test_metadata(self): - self.assertEqual(self.item.metadata, self.metadata) - - -class Test__setter(tests.IrisTest): - def setUp(self): - metadata = mock.MagicMock( - standard_name=mock.sentinel.standard_name, - long_name=mock.sentinel.long_name, - var_name=mock.sentinel.var_name, - units=mock.sentinel.units, - attributes=mock.sentinel.attributes, + assert self.item.metadata == self.metadata + + +class Test__setter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + metadata = mocker.MagicMock( + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, token=lambda name: name, ) @@ -81,68 +79,67 @@ def setUp(self): def test_standard_name__valid(self): standard_name = "air_temperature" self.item.standard_name = standard_name - self.assertEqual(self.item._metadata_manager.standard_name, standard_name) + assert self.item._metadata_manager.standard_name == standard_name def test_standard_name__none(self): self.item.standard_name = None - self.assertIsNone(self.item._metadata_manager.standard_name) + assert self.item._metadata_manager.standard_name is None def test_standard_name__invalid(self): standard_name = "nope nope" emsg = f"{standard_name!r} is not a valid standard_name" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.item.standard_name = standard_name def test_long_name(self): long_name = "long_name" self.item.long_name = long_name - self.assertEqual(self.item._metadata_manager.long_name, long_name) + assert self.item._metadata_manager.long_name == long_name def test_long_name__none(self): 
self.item.long_name = None - self.assertIsNone(self.item._metadata_manager.long_name) + assert self.item._metadata_manager.long_name is None def test_var_name(self): var_name = "var_name" self.item.var_name = var_name - self.assertEqual(self.item._metadata_manager.var_name, var_name) + assert self.item._metadata_manager.var_name == var_name def test_var_name__none(self): self.item.var_name = None - self.assertIsNone(self.item._metadata_manager.var_name) + assert self.item._metadata_manager.var_name is None def test_var_name__invalid_token(self): var_name = "nope nope" self.item._metadata_manager.token = lambda name: None emsg = f"{var_name!r} is not a valid NetCDF variable name." - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.item.var_name = var_name def test_attributes(self): attributes = dict(hello="world") self.item.attributes = attributes - self.assertEqual(self.item._metadata_manager.attributes, attributes) - self.assertIsNot(self.item._metadata_manager.attributes, attributes) - self.assertIsInstance( - self.item._metadata_manager.attributes, LimitedAttributeDict - ) + assert self.item._metadata_manager.attributes == attributes + assert self.item._metadata_manager.attributes is not attributes + assert isinstance(self.item._metadata_manager.attributes, LimitedAttributeDict) def test_attributes__none(self): self.item.attributes = None - self.assertEqual(self.item._metadata_manager.attributes, {}) + assert self.item._metadata_manager.attributes == {} -class Test__metadata_setter(tests.IrisTest): - def setUp(self): +class Test__metadata_setter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): class Metadata: def __init__(self): self.cls = BaseMetadata self.fields = BaseMetadata._fields - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes + self.standard_name = mocker.sentinel.standard_name + self.long_name = mocker.sentinel.long_name + self.var_name = mocker.sentinel.var_name + self.units = mocker.sentinel.units + self.attributes = mocker.sentinel.attributes self.token = lambda name: name @property @@ -170,31 +167,31 @@ def values(self): def test_dict(self): metadata = dict(**self.args) self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) + assert self.item._metadata_manager.values == metadata + assert self.item._metadata_manager.attributes is not self.attributes - def test_dict__partial(self): + def test_dict__partial(self, mocker): metadata = dict(**self.args) del metadata["standard_name"] self.item.metadata = metadata - metadata["standard_name"] = mock.sentinel.standard_name - self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) + metadata["standard_name"] = mocker.sentinel.standard_name + assert self.item._metadata_manager.values == metadata + assert self.item._metadata_manager.attributes is not self.attributes def test_ordereddict(self): metadata = self.args self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) + assert self.item._metadata_manager.values == metadata + assert self.item._metadata_manager.attributes is not self.attributes - def 
test_ordereddict__partial(self): + def test_ordereddict__partial(self, mocker): metadata = self.args del metadata["long_name"] del metadata["units"] self.item.metadata = metadata - metadata["long_name"] = mock.sentinel.long_name - metadata["units"] = mock.sentinel.units - self.assertEqual(self.item._metadata_manager.values, metadata) + metadata["long_name"] = mocker.sentinel.long_name + metadata["units"] = mocker.sentinel.units + assert self.item._metadata_manager.values == metadata def test_tuple(self): metadata = tuple(self.args.values()) @@ -205,14 +202,14 @@ def test_tuple(self): for field in self.item._metadata_manager.fields ] ) - self.assertEqual(result, metadata) - self.assertIsNot(self.item._metadata_manager.attributes, self.attributes) + assert result == metadata + assert self.item._metadata_manager.attributes is not self.attributes def test_tuple__missing(self): metadata = list(self.args.values()) del metadata[2] emsg = "Invalid .* metadata, require .* to be specified." - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): self.item.metadata = tuple(metadata) def test_namedtuple(self): @@ -222,10 +219,10 @@ def test_namedtuple(self): ) metadata = Metadata(**self.args) self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata._asdict()) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) + assert self.item._metadata_manager.values == metadata._asdict() + assert self.item._metadata_manager.attributes is not metadata.attributes - def test_namedtuple__partial(self): + def test_namedtuple__partial(self, mocker): Metadata = namedtuple( "Metadata", ("standard_name", "long_name", "var_name", "units") ) @@ -233,20 +230,20 @@ def test_namedtuple__partial(self): metadata = Metadata(**self.args) self.item.metadata = metadata expected = metadata._asdict() - expected.update(dict(attributes=mock.sentinel.attributes)) - self.assertEqual(self.item._metadata_manager.values, expected) + expected.update(dict(attributes=mocker.sentinel.attributes)) + assert self.item._metadata_manager.values == expected def test_class_ancillaryvariablemetadata(self): metadata = AncillaryVariableMetadata(**self.args) self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata._asdict()) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) + assert self.item._metadata_manager.values == metadata._asdict() + assert self.item._metadata_manager.attributes is not metadata.attributes def test_class_basemetadata(self): metadata = BaseMetadata(**self.args) self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata._asdict()) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) + assert self.item._metadata_manager.values == metadata._asdict() + assert self.item._metadata_manager.attributes is not metadata.attributes def test_class_cellmeasuremetadata(self): self.args["measure"] = None @@ -254,8 +251,8 @@ def test_class_cellmeasuremetadata(self): self.item.metadata = metadata expected = metadata._asdict() del expected["measure"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) + assert self.item._metadata_manager.values == expected + assert self.item._metadata_manager.attributes is not metadata.attributes def test_class_connectivitymetadata(self): self.args.update(dict(cf_role=None, start_index=None, 
location_axis=None)) @@ -265,8 +262,8 @@ def test_class_connectivitymetadata(self): del expected["cf_role"] del expected["start_index"] del expected["location_axis"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) + assert self.item._metadata_manager.values == expected + assert self.item._metadata_manager.attributes is not metadata.attributes def test_class_coordmetadata(self): self.args.update(dict(coord_system=None, climatological=False)) @@ -275,8 +272,8 @@ def test_class_coordmetadata(self): expected = metadata._asdict() del expected["coord_system"] del expected["climatological"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) + assert self.item._metadata_manager.values == expected + assert self.item._metadata_manager.attributes is not metadata.attributes def test_class_cubemetadata(self): self.args["cell_methods"] = None @@ -284,19 +281,20 @@ def test_class_cubemetadata(self): self.item.metadata = metadata expected = metadata._asdict() del expected["cell_methods"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot(self.item._metadata_manager.attributes, metadata.attributes) - - -class Test_rename(tests.IrisTest): - def setUp(self): - metadata = mock.MagicMock( - standard_name=mock.sentinel.standard_name, - long_name=mock.sentinel.long_name, - var_name=mock.sentinel.var_name, - units=mock.sentinel.units, - attributes=mock.sentinel.attributes, - values=mock.sentinel.metadata, + assert self.item._metadata_manager.values == expected + assert self.item._metadata_manager.attributes is not metadata.attributes + + +class Test_rename: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + metadata = mocker.MagicMock( + standard_name=mocker.sentinel.standard_name, + long_name=mocker.sentinel.long_name, + var_name=mocker.sentinel.var_name, + units=mocker.sentinel.units, + attributes=mocker.sentinel.attributes, + values=mocker.sentinel.metadata, token=lambda name: name, ) @@ -306,39 +304,36 @@ def setUp(self): def test__valid_standard_name(self): name = "air_temperature" self.item.rename(name) - self.assertEqual(self.item._metadata_manager.standard_name, name) - self.assertIsNone(self.item._metadata_manager.long_name) - self.assertIsNone(self.item._metadata_manager.var_name) + assert self.item._metadata_manager.standard_name == name + assert self.item._metadata_manager.long_name is None + assert self.item._metadata_manager.var_name is None def test__invalid_standard_name(self): name = "nope nope" self.item.rename(name) - self.assertIsNone(self.item._metadata_manager.standard_name) - self.assertEqual(self.item._metadata_manager.long_name, name) - self.assertIsNone(self.item._metadata_manager.var_name) + assert self.item._metadata_manager.standard_name is None + assert self.item._metadata_manager.long_name == name + assert self.item._metadata_manager.var_name is None -class Test_name(tests.IrisTest): - def setUp(self): +class Test_name: + @pytest.fixture(autouse=True) + def _setup(self, mocker): class Metadata: def __init__(self, name): - self.name = mock.MagicMock(return_value=name) + self.name = mocker.MagicMock(return_value=name) - self.name = mock.sentinel.name + self.name = mocker.sentinel.name metadata = Metadata(self.name) self.item = CFVariableMixin() self.item._metadata_manager = metadata - def test(self): - default = mock.sentinel.default - token = 
mock.sentinel.token + def test(self, mocker): + default = mocker.sentinel.default + token = mocker.sentinel.token result = self.item.name(default=default, token=token) - self.assertEqual(result, self.name) + assert result == self.name self.item._metadata_manager.name.assert_called_with( default=default, token=token ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py index dfb49f0f8d..ca8fd97c39 100644 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py @@ -4,50 +4,45 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.common.mixin import LimitedAttributeDict -class Test(tests.IrisTest): - def setUp(self): +class Test: + @pytest.fixture(autouse=True) + def _setup(self): self.forbidden_keys = LimitedAttributeDict.CF_ATTRS_FORBIDDEN self.emsg = "{!r} is not a permitted attribute" def test__invalid_keys(self): for key in self.forbidden_keys: - with self.assertRaisesRegex(ValueError, self.emsg.format(key)): + with pytest.raises(ValueError, match=self.emsg.format(key)): _ = LimitedAttributeDict(**{key: None}) - def test___eq__(self): + def test___eq__(self, mocker): values = dict( - one=mock.sentinel.one, - two=mock.sentinel.two, - three=mock.sentinel.three, + one=mocker.sentinel.one, + two=mocker.sentinel.two, + three=mocker.sentinel.three, ) left = LimitedAttributeDict(**values) right = LimitedAttributeDict(**values) - self.assertEqual(left, right) - self.assertEqual(left, values) + assert left == right + assert left == values def test___eq___numpy(self): values = dict(one=np.arange(1), two=np.arange(2), three=np.arange(3)) left = LimitedAttributeDict(**values) right = LimitedAttributeDict(**values) - self.assertEqual(left, right) - self.assertEqual(left, values) - + assert left == right + assert left == values values = dict(one=np.arange(1), two=np.arange(1), three=np.arange(1)) left = LimitedAttributeDict(dict(one=0, two=0, three=0)) right = LimitedAttributeDict(**values) - self.assertEqual(left, right) - self.assertEqual(left, values) + assert left == right + assert left == values # Test inequality: values = dict(one=np.arange(1), two=np.arange(2), three=np.arange(3)) @@ -55,22 +50,18 @@ def test___eq___numpy(self): right = LimitedAttributeDict( one=np.arange(3), two=np.arange(2), three=np.arange(1) ) - self.assertNotEqual(left, right) - self.assertNotEqual(values, right) + assert right != left + assert right != values def test___setitem__(self): for key in self.forbidden_keys: item = LimitedAttributeDict() - with self.assertRaisesRegex(ValueError, self.emsg.format(key)): + with pytest.raises(ValueError, match=self.emsg.format(key)): item[key] = None def test_update(self): for key in self.forbidden_keys: item = LimitedAttributeDict() - with self.assertRaisesRegex(ValueError, self.emsg.format(key)): - other = {key: None} + other = {key: None} + with pytest.raises(ValueError, match=self.emsg.format(key)): item.update(other) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py 
b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py index 67ba108333..c7be9d48ca 100644 --- a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py +++ b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py @@ -4,65 +4,60 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest from iris.common.mixin import _get_valid_standard_name -class Test(tests.IrisTest): - def setUp(self): +class Test: + @pytest.fixture(autouse=True) + def _setup(self): self.emsg = "'{}' is not a valid standard_name" def test_pass_thru_none(self): name = None - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_pass_thru_empty(self): name = "" - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_pass_thru_whitespace(self): name = " " - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_valid_standard_name(self): name = "air_temperature" - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_standard_name_alias(self): name = "atmosphere_optical_thickness_due_to_pm1_ambient_aerosol" - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_invalid_standard_name(self): name = "not_a_standard_name" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): + with pytest.raises(ValueError, match=self.emsg.format(name)): _get_valid_standard_name(name) def test_valid_standard_name_valid_modifier(self): name = "air_temperature standard_error" - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_valid_standard_name_valid_modifier_extra_spaces(self): name = "air_temperature standard_error" - self.assertEqual(_get_valid_standard_name(name), name) + assert _get_valid_standard_name(name) == name def test_invalid_standard_name_valid_modifier(self): name = "not_a_standard_name standard_error" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): + with pytest.raises(ValueError, match=self.emsg.format(name)): _get_valid_standard_name(name) def test_valid_standard_invalid_name_modifier(self): name = "air_temperature extra_names standard_error" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): + with pytest.raises(ValueError, match=self.emsg.format(name)): _get_valid_standard_name(name) def test_valid_standard_valid_name_modifier_extra_names(self): name = "air_temperature standard_error extra words" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): + with pytest.raises(ValueError, match=self.emsg.format(name)): _get_valid_standard_name(name) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py index 0bad967acb..05cc0caba3 100644 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ b/lib/iris/tests/unit/common/resolve/test_Resolve.py @@ -4,17 +4,13 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :class:`iris.common.resolve.Resolve`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from collections import namedtuple from copy import deepcopy -import unittest.mock as mock -from unittest.mock import Mock, sentinel +from unittest import mock from cf_units import Unit import numpy as np +import pytest from iris.common.lenient import LENIENT from iris.common.metadata import CubeMetadata @@ -32,137 +28,140 @@ from iris.cube import Cube -class Test___init__(tests.IrisTest): - def setUp(self): +class Test___init__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): target = "iris.common.resolve.Resolve.__call__" - self.m_call = mock.MagicMock(return_value=sentinel.return_value) - _ = self.patch(target, new=self.m_call) + self.m_call = mocker.MagicMock(return_value=mocker.sentinel.return_value) + mocker.patch(target, new=self.m_call) def _assert_members_none(self, resolve): - self.assertIsNone(resolve.lhs_cube_resolved) - self.assertIsNone(resolve.rhs_cube_resolved) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) - self.assertIsNone(resolve.lhs_cube_dim_coverage) - self.assertIsNone(resolve.lhs_cube_aux_coverage) - self.assertIsNone(resolve.rhs_cube_dim_coverage) - self.assertIsNone(resolve.rhs_cube_aux_coverage) - self.assertIsNone(resolve.map_rhs_to_lhs) - self.assertIsNone(resolve.mapping) - self.assertIsNone(resolve.prepared_category) - self.assertIsNone(resolve.prepared_factories) - self.assertIsNone(resolve._broadcast_shape) + assert resolve.lhs_cube_resolved is None + assert resolve.rhs_cube_resolved is None + assert resolve.lhs_cube_category is None + assert resolve.rhs_cube_category is None + assert resolve.lhs_cube_category_local is None + assert resolve.rhs_cube_category_local is None + assert resolve.category_common is None + assert resolve.lhs_cube_dim_coverage is None + assert resolve.lhs_cube_aux_coverage is None + assert resolve.rhs_cube_dim_coverage is None + assert resolve.rhs_cube_aux_coverage is None + assert resolve.map_rhs_to_lhs is None + assert resolve.mapping is None + assert resolve.prepared_category is None + assert resolve.prepared_factories is None + assert resolve._broadcast_shape is None def test_lhs_rhs_default(self): resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) + assert resolve.lhs_cube is None + assert resolve.rhs_cube is None self._assert_members_none(resolve) - self.assertEqual(0, self.m_call.call_count) + assert self.m_call.call_count == 0 - def test_lhs_rhs_provided(self): - m_lhs = sentinel.lhs - m_rhs = sentinel.rhs + def test_lhs_rhs_provided(self, mocker): + m_lhs = mocker.sentinel.lhs + m_rhs = mocker.sentinel.rhs resolve = Resolve(lhs=m_lhs, rhs=m_rhs) # The lhs_cube and rhs_cube are only None due # to __call__ being mocked. See Test___call__ # for appropriate test coverage. 
- self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) + assert resolve.lhs_cube is None + assert resolve.rhs_cube is None self._assert_members_none(resolve) - self.assertEqual(1, self.m_call.call_count) - call_args = mock.call(m_lhs, m_rhs) - self.assertEqual(call_args, self.m_call.call_args) + assert self.m_call.call_count == 1 + call_args = mocker.call(m_lhs, m_rhs) + assert self.m_call.call_args == call_args -class Test___call__(tests.IrisTest): - def setUp(self): - self.m_lhs = mock.MagicMock(spec=Cube) - self.m_rhs = mock.MagicMock(spec=Cube) +class Test___call__: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.m_lhs = mocker.MagicMock(spec=Cube) + self.m_rhs = mocker.MagicMock(spec=Cube) target = "iris.common.resolve.Resolve.{method}" method = target.format(method="_metadata_resolve") - self.m_metadata_resolve = self.patch(method) + self.m_metadata_resolve = mocker.patch(method) method = target.format(method="_metadata_coverage") - self.m_metadata_coverage = self.patch(method) + self.m_metadata_coverage = mocker.patch(method) method = target.format(method="_metadata_mapping") - self.m_metadata_mapping = self.patch(method) + self.m_metadata_mapping = mocker.patch(method) method = target.format(method="_metadata_prepare") - self.m_metadata_prepare = self.patch(method) + self.m_metadata_prepare = mocker.patch(method) def test_lhs_not_cube(self): emsg = "'LHS' argument to be a 'Cube'" - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = Resolve(rhs=self.m_rhs) def test_rhs_not_cube(self): emsg = "'RHS' argument to be a 'Cube'" - with self.assertRaisesRegex(TypeError, emsg): + with pytest.raises(TypeError, match=emsg): _ = Resolve(lhs=self.m_lhs) def _assert_called_metadata_methods(self): call_args = mock.call() - self.assertEqual(1, self.m_metadata_resolve.call_count) - self.assertEqual(call_args, self.m_metadata_resolve.call_args) - self.assertEqual(1, self.m_metadata_coverage.call_count) - self.assertEqual(call_args, self.m_metadata_coverage.call_args) - self.assertEqual(1, self.m_metadata_mapping.call_count) - self.assertEqual(call_args, self.m_metadata_mapping.call_args) - self.assertEqual(1, self.m_metadata_prepare.call_count) - self.assertEqual(call_args, self.m_metadata_prepare.call_args) + assert self.m_metadata_resolve.call_count == 1 + assert self.m_metadata_resolve.call_args == call_args + assert self.m_metadata_coverage.call_count == 1 + assert self.m_metadata_coverage.call_args == call_args + assert self.m_metadata_mapping.call_count == 1 + assert self.m_metadata_mapping.call_args == call_args + assert self.m_metadata_prepare.call_count == 1 + assert self.m_metadata_prepare.call_args == call_args def test_map_rhs_to_lhs__less_than(self): self.m_lhs.ndim = 2 self.m_rhs.ndim = 1 resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) - self.assertEqual(self.m_lhs, resolve.lhs_cube) - self.assertEqual(self.m_rhs, resolve.rhs_cube) - self.assertTrue(resolve.map_rhs_to_lhs) + assert resolve.lhs_cube == self.m_lhs + assert resolve.rhs_cube == self.m_rhs + assert resolve.map_rhs_to_lhs self._assert_called_metadata_methods() def test_map_rhs_to_lhs__equal(self): self.m_lhs.ndim = 2 self.m_rhs.ndim = 2 resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) - self.assertEqual(self.m_lhs, resolve.lhs_cube) - self.assertEqual(self.m_rhs, resolve.rhs_cube) - self.assertTrue(resolve.map_rhs_to_lhs) + assert resolve.lhs_cube == self.m_lhs + assert resolve.rhs_cube == self.m_rhs + assert resolve.map_rhs_to_lhs 
self._assert_called_metadata_methods() def test_map_lhs_to_rhs(self): self.m_lhs.ndim = 2 self.m_rhs.ndim = 3 resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) - self.assertEqual(self.m_lhs, resolve.lhs_cube) - self.assertEqual(self.m_rhs, resolve.rhs_cube) - self.assertFalse(resolve.map_rhs_to_lhs) + assert resolve.lhs_cube == self.m_lhs + assert resolve.rhs_cube == self.m_rhs + assert not resolve.map_rhs_to_lhs self._assert_called_metadata_methods() -class Test__categorise_items(tests.IrisTest): - def setUp(self): +class Test__categorise_items: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.coord_dims = {} # configure dim coords - coord = mock.Mock(metadata=sentinel.dim_metadata1) + coord = mocker.Mock(metadata=mocker.sentinel.dim_metadata1) self.dim_coords = [coord] - self.coord_dims[coord] = sentinel.dims1 + self.coord_dims[coord] = mocker.sentinel.dims1 # configure aux and scalar coords self.aux_coords = [] pairs = [ - (sentinel.aux_metadata2, sentinel.dims2), - (sentinel.aux_metadata3, sentinel.dims3), - (sentinel.scalar_metadata4, None), - (sentinel.scalar_metadata5, None), - (sentinel.scalar_metadata6, None), + (mocker.sentinel.aux_metadata2, mocker.sentinel.dims2), + (mocker.sentinel.aux_metadata3, mocker.sentinel.dims3), + (mocker.sentinel.scalar_metadata4, None), + (mocker.sentinel.scalar_metadata5, None), + (mocker.sentinel.scalar_metadata6, None), ] for metadata, dims in pairs: - coord = mock.Mock(metadata=metadata) + coord = mocker.Mock(metadata=metadata) self.aux_coords.append(coord) self.coord_dims[coord] = dims func = lambda coord: self.coord_dims[coord] - self.cube = mock.Mock( + self.cube = mocker.Mock( aux_coords=self.aux_coords, dim_coords=self.dim_coords, coord_dims=func, @@ -170,19 +169,19 @@ def setUp(self): def test(self): result = Resolve._categorise_items(self.cube) - self.assertIsInstance(result, _CategoryItems) - self.assertEqual(1, len(result.items_dim)) + assert isinstance(result, _CategoryItems) + assert len(result.items_dim) == 1 # check dim coords for item in result.items_dim: - self.assertIsInstance(item, _Item) + assert isinstance(item, _Item) (coord,) = self.dim_coords dims = self.coord_dims[coord] expected = [_Item(metadata=coord.metadata, coord=coord, dims=dims)] - self.assertEqual(expected, result.items_dim) + assert result.items_dim == expected # check aux coords - self.assertEqual(2, len(result.items_aux)) + assert len(result.items_aux) == 2 for item in result.items_aux: - self.assertIsInstance(item, _Item) + assert isinstance(item, _Item) expected_aux, expected_scalar = [], [] for coord in self.aux_coords: dims = self.coord_dims[coord] @@ -191,19 +190,20 @@ def test(self): expected_aux.append(item) else: expected_scalar.append(item) - self.assertEqual(expected_aux, result.items_aux) + assert result.items_aux == expected_aux # check scalar coords - self.assertEqual(3, len(result.items_scalar)) + assert len(result.items_scalar) == 3 for item in result.items_scalar: - self.assertIsInstance(item, _Item) - self.assertEqual(expected_scalar, result.items_scalar) + assert isinstance(item, _Item) + assert result.items_scalar == expected_scalar -class Test__metadata_resolve(tests.IrisTest): - def setUp(self): +class Test__metadata_resolve: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.target = "iris.common.resolve.Resolve._categorise_items" - self.m_lhs_cube = sentinel.lhs_cube - self.m_rhs_cube = sentinel.rhs_cube + self.m_lhs_cube = mocker.sentinel.lhs_cube + self.m_rhs_cube = mocker.sentinel.rhs_cube @staticmethod def 
_create_items(pairs): @@ -219,72 +219,72 @@ def _create_items(pairs): result.append(item) return result - def test_metadata_same(self): + def test_metadata_same(self, mocker): category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords - pairs = [(sentinel.dim_metadata1, sentinel.dims1)] + pairs = [(mocker.sentinel.dim_metadata1, mocker.sentinel.dims1)] category.items_dim.extend(self._create_items(pairs)) # configure aux coords pairs = [ - (sentinel.aux_metadata1, sentinel.dims2), - (sentinel.aux_metadata2, sentinel.dims3), + (mocker.sentinel.aux_metadata1, mocker.sentinel.dims2), + (mocker.sentinel.aux_metadata2, mocker.sentinel.dims3), ] category.items_aux.extend(self._create_items(pairs)) # configure scalar coords pairs = [ - (sentinel.scalar_metadata1, None), - (sentinel.scalar_metadata2, None), - (sentinel.scalar_metadata3, None), + (mocker.sentinel.scalar_metadata1, None), + (mocker.sentinel.scalar_metadata2, None), + (mocker.sentinel.scalar_metadata3, None), ] category.items_scalar.extend(self._create_items(pairs)) side_effect = (category, category) - mocker = self.patch(self.target, side_effect=side_effect) + patcher = mocker.patch(self.target, side_effect=side_effect) resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) + assert resolve.lhs_cube is None + assert resolve.rhs_cube is None + assert resolve.lhs_cube_category is None + assert resolve.rhs_cube_category is None + assert resolve.lhs_cube_category_local is None + assert resolve.rhs_cube_category_local is None + assert resolve.category_common is None # require to explicitly configure cubes resolve.lhs_cube = self.m_lhs_cube resolve.rhs_cube = self.m_rhs_cube resolve._metadata_resolve() - self.assertEqual(mocker.call_count, 2) - calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] - self.assertEqual(calls, mocker.call_args_list) + assert patcher.call_count == 2 + calls = [mocker.call(self.m_lhs_cube), mocker.call(self.m_rhs_cube)] + assert patcher.call_args_list == calls - self.assertEqual(category, resolve.lhs_cube_category) - self.assertEqual(category, resolve.rhs_cube_category) + assert resolve.lhs_cube_category == category + assert resolve.rhs_cube_category == category expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - self.assertEqual(expected, resolve.lhs_cube_category_local) - self.assertEqual(expected, resolve.rhs_cube_category_local) - self.assertEqual(category, resolve.category_common) + assert resolve.lhs_cube_category_local == expected + assert resolve.rhs_cube_category_local == expected + assert resolve.category_common == category - def test_metadata_overlap(self): + def test_metadata_overlap(self, mocker): # configure the lhs cube category category_lhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords pairs = [ - (sentinel.dim_metadata1, sentinel.dims1), - (sentinel.dim_metadata2, sentinel.dims2), + (mocker.sentinel.dim_metadata1, mocker.sentinel.dims1), + (mocker.sentinel.dim_metadata2, mocker.sentinel.dims2), ] category_lhs.items_dim.extend(self._create_items(pairs)) # configure aux coords pairs = [ - (sentinel.aux_metadata1, sentinel.dims3), - (sentinel.aux_metadata2, sentinel.dims4), + (mocker.sentinel.aux_metadata1, 
mocker.sentinel.dims3), + (mocker.sentinel.aux_metadata2, mocker.sentinel.dims4), ] category_lhs.items_aux.extend(self._create_items(pairs)) # configure scalar coords pairs = [ - (sentinel.scalar_metadata1, None), - (sentinel.scalar_metadata2, None), + (mocker.sentinel.scalar_metadata1, None), + (mocker.sentinel.scalar_metadata2, None), ] category_lhs.items_scalar.extend(self._create_items(pairs)) @@ -292,40 +292,40 @@ def test_metadata_overlap(self): category_rhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords category_rhs.items_dim.append(category_lhs.items_dim[0]) - pairs = [(sentinel.dim_metadata200, sentinel.dims2)] + pairs = [(mocker.sentinel.dim_metadata200, mocker.sentinel.dims2)] category_rhs.items_dim.extend(self._create_items(pairs)) # configure aux coords category_rhs.items_aux.append(category_lhs.items_aux[0]) - pairs = [(sentinel.aux_metadata200, sentinel.dims4)] + pairs = [(mocker.sentinel.aux_metadata200, mocker.sentinel.dims4)] category_rhs.items_aux.extend(self._create_items(pairs)) # configure scalar coords category_rhs.items_scalar.append(category_lhs.items_scalar[0]) - pairs = [(sentinel.scalar_metadata200, None)] + pairs = [(mocker.sentinel.scalar_metadata200, None)] category_rhs.items_scalar.extend(self._create_items(pairs)) side_effect = (category_lhs, category_rhs) - mocker = self.patch(self.target, side_effect=side_effect) + patcher = mocker.patch(self.target, side_effect=side_effect) resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) + assert resolve.lhs_cube is None + assert resolve.rhs_cube is None + assert resolve.lhs_cube_category is None + assert resolve.rhs_cube_category is None + assert resolve.lhs_cube_category_local is None + assert resolve.rhs_cube_category_local is None + assert resolve.category_common is None # require to explicitly configure cubes resolve.lhs_cube = self.m_lhs_cube resolve.rhs_cube = self.m_rhs_cube resolve._metadata_resolve() - self.assertEqual(2, mocker.call_count) - calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] - self.assertEqual(calls, mocker.call_args_list) + assert patcher.call_count == 2 + calls = [mocker.call(self.m_lhs_cube), mocker.call(self.m_rhs_cube)] + assert patcher.call_args_list == calls - self.assertEqual(category_lhs, resolve.lhs_cube_category) - self.assertEqual(category_rhs, resolve.rhs_cube_category) + assert resolve.lhs_cube_category == category_lhs + assert resolve.rhs_cube_category == category_rhs items_dim = [category_lhs.items_dim[1]] items_aux = [category_lhs.items_aux[1]] @@ -333,7 +333,7 @@ def test_metadata_overlap(self): expected = _CategoryItems( items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar ) - self.assertEqual(expected, resolve.lhs_cube_category_local) + assert resolve.lhs_cube_category_local == expected items_dim = [category_rhs.items_dim[1]] items_aux = [category_rhs.items_aux[1]] @@ -341,7 +341,7 @@ def test_metadata_overlap(self): expected = _CategoryItems( items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar ) - self.assertEqual(expected, resolve.rhs_cube_category_local) + assert resolve.rhs_cube_category_local == expected items_dim = [category_lhs.items_dim[0]] items_aux = [category_lhs.items_aux[0]] @@ -349,27 
+349,27 @@ def test_metadata_overlap(self): expected = _CategoryItems( items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar ) - self.assertEqual(expected, resolve.category_common) + assert resolve.category_common == expected - def test_metadata_different(self): + def test_metadata_different(self, mocker): # configure the lhs cube category category_lhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords pairs = [ - (sentinel.dim_metadata1, sentinel.dims1), - (sentinel.dim_metadata2, sentinel.dims2), + (mocker.sentinel.dim_metadata1, mocker.sentinel.dims1), + (mocker.sentinel.dim_metadata2, mocker.sentinel.dims2), ] category_lhs.items_dim.extend(self._create_items(pairs)) # configure aux coords pairs = [ - (sentinel.aux_metadata1, sentinel.dims3), - (sentinel.aux_metadata2, sentinel.dims4), + (mocker.sentinel.aux_metadata1, mocker.sentinel.dims3), + (mocker.sentinel.aux_metadata2, mocker.sentinel.dims4), ] category_lhs.items_aux.extend(self._create_items(pairs)) # configure scalar coords pairs = [ - (sentinel.scalar_metadata1, None), - (sentinel.scalar_metadata2, None), + (mocker.sentinel.scalar_metadata1, None), + (mocker.sentinel.scalar_metadata2, None), ] category_lhs.items_scalar.extend(self._create_items(pairs)) @@ -377,62 +377,63 @@ def test_metadata_different(self): category_rhs = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # configure dim coords pairs = [ - (sentinel.dim_metadata100, sentinel.dims1), - (sentinel.dim_metadata200, sentinel.dims2), + (mocker.sentinel.dim_metadata100, mocker.sentinel.dims1), + (mocker.sentinel.dim_metadata200, mocker.sentinel.dims2), ] category_rhs.items_dim.extend(self._create_items(pairs)) # configure aux coords pairs = [ - (sentinel.aux_metadata100, sentinel.dims3), - (sentinel.aux_metadata200, sentinel.dims4), + (mocker.sentinel.aux_metadata100, mocker.sentinel.dims3), + (mocker.sentinel.aux_metadata200, mocker.sentinel.dims4), ] category_rhs.items_aux.extend(self._create_items(pairs)) # configure scalar coords pairs = [ - (sentinel.scalar_metadata100, None), - (sentinel.scalar_metadata200, None), + (mocker.sentinel.scalar_metadata100, None), + (mocker.sentinel.scalar_metadata200, None), ] category_rhs.items_scalar.extend(self._create_items(pairs)) side_effect = (category_lhs, category_rhs) - mocker = self.patch(self.target, side_effect=side_effect) + patcher = mocker.patch(self.target, side_effect=side_effect) resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) + assert resolve.lhs_cube is None + assert resolve.rhs_cube is None + assert resolve.lhs_cube_category is None + assert resolve.rhs_cube_category is None + assert resolve.lhs_cube_category_local is None + assert resolve.rhs_cube_category_local is None + assert resolve.category_common is None # first require to explicitly lhs/rhs configure cubes resolve.lhs_cube = self.m_lhs_cube resolve.rhs_cube = self.m_rhs_cube resolve._metadata_resolve() - self.assertEqual(2, mocker.call_count) - calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] - self.assertEqual(calls, mocker.call_args_list) + assert patcher.call_count == 2 + calls = [mocker.call(self.m_lhs_cube), mocker.call(self.m_rhs_cube)] + assert patcher.call_args_list == calls - 
self.assertEqual(category_lhs, resolve.lhs_cube_category) - self.assertEqual(category_rhs, resolve.rhs_cube_category) - self.assertEqual(category_lhs, resolve.lhs_cube_category_local) - self.assertEqual(category_rhs, resolve.rhs_cube_category_local) + assert resolve.lhs_cube_category == category_lhs + assert resolve.rhs_cube_category == category_rhs + assert resolve.lhs_cube_category_local == category_lhs + assert resolve.rhs_cube_category_local == category_rhs expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - self.assertEqual(expected, resolve.category_common) + assert resolve.category_common == expected -class Test__dim_coverage(tests.IrisTest): - def setUp(self): +class Test__dim_coverage: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.ndim = 4 - self.cube = mock.Mock(ndim=self.ndim) + self.cube = mocker.Mock(ndim=self.ndim) self.items = [] parts = [ - (sentinel.metadata0, sentinel.coord0, (0,)), - (sentinel.metadata1, sentinel.coord1, (1,)), - (sentinel.metadata2, sentinel.coord2, (2,)), - (sentinel.metadata3, sentinel.coord3, (3,)), + (mocker.sentinel.metadata0, mocker.sentinel.coord0, (0,)), + (mocker.sentinel.metadata1, mocker.sentinel.coord1, (1,)), + (mocker.sentinel.metadata2, mocker.sentinel.coord2, (2,)), + (mocker.sentinel.metadata3, mocker.sentinel.coord3, (3,)), ] column_parts = [x for x in zip(*parts)] self.metadata, self.coords, self.dims = [list(x) for x in column_parts] @@ -445,70 +446,75 @@ def test_coverage_no_local_no_common_all_free(self): items = [] common = [] result = Resolve._dim_coverage(self.cube, items, common) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) + assert isinstance(result, _DimCoverage) + assert result.cube == self.cube expected = [None] * self.ndim - self.assertEqual(expected, result.metadata) - self.assertEqual(expected, result.coords) - self.assertEqual([], result.dims_common) - self.assertEqual([], result.dims_local) + assert result.metadata == expected + assert result.coords == expected + assert result.dims_common == [] + assert result.dims_local == [] expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_free) + assert result.dims_free == expected def test_coverage_all_local_no_common_no_free(self): common = [] result = Resolve._dim_coverage(self.cube, self.items, common) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual(self.metadata, result.metadata) - self.assertEqual(self.coords, result.coords) - self.assertEqual([], result.dims_common) - self.assertEqual(self.dims, result.dims_local) - self.assertEqual([], result.dims_free) + assert isinstance(result, _DimCoverage) + assert result.cube == self.cube + assert result.metadata == self.metadata + assert result.coords == self.coords + assert result.dims_common == [] + assert result.dims_local == self.dims + assert result.dims_free == [] def test_coverage_no_local_all_common_no_free(self): result = Resolve._dim_coverage(self.cube, self.items, self.metadata) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual(self.metadata, result.metadata) - self.assertEqual(self.coords, result.coords) - self.assertEqual(self.dims, result.dims_common) - self.assertEqual([], result.dims_local) - self.assertEqual([], result.dims_free) - - def test_coverage_mixed(self): + assert isinstance(result, _DimCoverage) + assert result.cube == self.cube + assert result.metadata == self.metadata + assert 
result.coords == self.coords + assert result.dims_common == self.dims + assert result.dims_local == [] + assert result.dims_free == [] + + def test_coverage_mixed(self, mocker): common = [self.items[1].metadata, self.items[2].metadata] self.items.pop(0) self.items.pop(-1) - metadata, coord, dims = sentinel.metadata100, sentinel.coord100, (0,) + metadata, coord, dims = ( + mocker.sentinel.metadata100, + mocker.sentinel.coord100, + (0,), + ) self.items.append(_Item(metadata=metadata, coord=coord, dims=dims)) result = Resolve._dim_coverage(self.cube, self.items, common) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) + assert isinstance(result, _DimCoverage) + assert result.cube == self.cube expected = [ metadata, self.items[0].metadata, self.items[1].metadata, None, ] - self.assertEqual(expected, result.metadata) + assert result.metadata == expected expected = [coord, self.items[0].coord, self.items[1].coord, None] - self.assertEqual(expected, result.coords) - self.assertEqual([1, 2], result.dims_common) - self.assertEqual([0], result.dims_local) - self.assertEqual([3], result.dims_free) + assert result.coords == expected + assert result.dims_common == [1, 2] + assert result.dims_local == [0] + assert result.dims_free == [3] -class Test__aux_coverage(tests.IrisTest): - def setUp(self): +class Test__aux_coverage: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.ndim = 4 - self.cube = mock.Mock(ndim=self.ndim) + self.cube = mocker.Mock(ndim=self.ndim) # configure aux coords self.items_aux = [] aux_parts = [ - (sentinel.aux_metadata0, sentinel.aux_coord0, (0,)), - (sentinel.aux_metadata1, sentinel.aux_coord1, (1,)), - (sentinel.aux_metadata23, sentinel.aux_coord23, (2, 3)), + (mocker.sentinel.aux_metadata0, mocker.sentinel.aux_coord0, (0,)), + (mocker.sentinel.aux_metadata1, mocker.sentinel.aux_coord1, (1,)), + (mocker.sentinel.aux_metadata23, mocker.sentinel.aux_coord23, (2, 3)), ] column_aux_parts = [x for x in zip(*aux_parts)] self.aux_metadata, self.aux_coords, self.aux_dims = [ @@ -520,9 +526,9 @@ def setUp(self): # configure scalar coords self.items_scalar = [] scalar_parts = [ - (sentinel.scalar_metadata0, sentinel.scalar_coord0, ()), - (sentinel.scalar_metadata1, sentinel.scalar_coord1, ()), - (sentinel.scalar_metadata2, sentinel.scalar_coord2, ()), + (mocker.sentinel.scalar_metadata0, mocker.sentinel.scalar_coord0, ()), + (mocker.sentinel.scalar_metadata1, mocker.sentinel.scalar_coord1, ()), + (mocker.sentinel.scalar_metadata2, mocker.sentinel.scalar_coord2, ()), ] column_scalar_parts = [x for x in zip(*scalar_parts)] self.scalar_metadata, self.scalar_coords, self.scalar_dims = [ @@ -538,16 +544,16 @@ def test_coverage_no_local_no_common_all_free(self): result = Resolve._aux_coverage( self.cube, items_aux, items_scalar, common_aux, common_scalar ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual([], result.common_items_aux) - self.assertEqual([], result.common_items_scalar) - self.assertEqual([], result.local_items_aux) - self.assertEqual([], result.local_items_scalar) - self.assertEqual([], result.dims_common) - self.assertEqual([], result.dims_local) + assert isinstance(result, _AuxCoverage) + assert result.cube == self.cube + assert result.common_items_aux == [] + assert result.common_items_scalar == [] + assert result.local_items_aux == [] + assert result.local_items_scalar == [] + assert result.dims_common == [] + assert result.dims_local == [] expected = 
list(range(self.ndim)) - self.assertEqual(expected, result.dims_free) + assert result.dims_free == expected def test_coverage_all_local_no_common_no_free(self): common_aux, common_scalar = [], [] @@ -558,17 +564,16 @@ def test_coverage_all_local_no_common_no_free(self): common_aux, common_scalar, ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - expected = [] - self.assertEqual(expected, result.common_items_aux) - self.assertEqual(expected, result.common_items_scalar) - self.assertEqual(self.items_aux, result.local_items_aux) - self.assertEqual(self.items_scalar, result.local_items_scalar) - self.assertEqual([], result.dims_common) + assert isinstance(result, _AuxCoverage) + assert result.cube == self.cube + assert result.common_items_aux == [] + assert result.common_items_scalar == [] + assert result.local_items_aux == self.items_aux + assert result.local_items_scalar == self.items_scalar + assert result.dims_common == [] expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_local) - self.assertEqual([], result.dims_free) + assert result.dims_local == expected + assert result.dims_free == [] def test_coverage_no_local_all_common_no_free(self): result = Resolve._aux_coverage( @@ -578,16 +583,16 @@ def test_coverage_no_local_all_common_no_free(self): self.aux_metadata, self.scalar_metadata, ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual(self.items_aux, result.common_items_aux) - self.assertEqual(self.items_scalar, result.common_items_scalar) - self.assertEqual([], result.local_items_aux) - self.assertEqual([], result.local_items_scalar) + assert isinstance(result, _AuxCoverage) + assert result.cube == self.cube + assert result.common_items_aux == self.items_aux + assert result.common_items_scalar == self.items_scalar + assert result.local_items_aux == [] + assert result.local_items_scalar == [] expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_common) - self.assertEqual([], result.dims_local) - self.assertEqual([], result.dims_free) + assert result.dims_common == expected + assert result.dims_local == [] + assert result.dims_free == [] def test_coverage_mixed(self): common_aux = [self.items_aux[-1].metadata] @@ -600,41 +605,42 @@ def test_coverage_mixed(self): common_aux, common_scalar, ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) + assert isinstance(result, _AuxCoverage) + assert result.cube == self.cube expected = [self.items_aux[-1]] - self.assertEqual(expected, result.common_items_aux) + assert result.common_items_aux == expected expected = [self.items_scalar[1]] - self.assertEqual(expected, result.common_items_scalar) + assert result.common_items_scalar == expected expected = [self.items_aux[0]] - self.assertEqual(expected, result.local_items_aux) + assert result.local_items_aux == expected expected = [self.items_scalar[0], self.items_scalar[2]] - self.assertEqual(expected, result.local_items_scalar) - self.assertEqual([2, 3], result.dims_common) - self.assertEqual([0], result.dims_local) - self.assertEqual([1], result.dims_free) + assert result.local_items_scalar == expected + assert result.dims_common == [2, 3] + assert result.dims_local == [0] + assert result.dims_free == [1] -class Test__metadata_coverage(tests.IrisTest): - def setUp(self): +class Test__metadata_coverage: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.m_lhs_cube = 
sentinel.lhs_cube + self.m_lhs_cube = mocker.sentinel.lhs_cube self.resolve.lhs_cube = self.m_lhs_cube - self.m_rhs_cube = sentinel.rhs_cube + self.m_rhs_cube = mocker.sentinel.rhs_cube self.resolve.rhs_cube = self.m_rhs_cube - self.m_items_dim_metadata = sentinel.items_dim_metadata - self.m_items_aux_metadata = sentinel.items_aux_metadata - self.m_items_scalar_metadata = sentinel.items_scalar_metadata - items_dim = [mock.Mock(metadata=self.m_items_dim_metadata)] - items_aux = [mock.Mock(metadata=self.m_items_aux_metadata)] - items_scalar = [mock.Mock(metadata=self.m_items_scalar_metadata)] + self.m_items_dim_metadata = mocker.sentinel.items_dim_metadata + self.m_items_aux_metadata = mocker.sentinel.items_aux_metadata + self.m_items_scalar_metadata = mocker.sentinel.items_scalar_metadata + items_dim = [mocker.Mock(metadata=self.m_items_dim_metadata)] + items_aux = [mocker.Mock(metadata=self.m_items_aux_metadata)] + items_scalar = [mocker.Mock(metadata=self.m_items_scalar_metadata)] category = _CategoryItems( items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar ) self.resolve.category_common = category - self.m_items_dim = sentinel.items_dim - self.m_items_aux = sentinel.items_aux - self.m_items_scalar = sentinel.items_scalar + self.m_items_dim = mocker.sentinel.items_dim + self.m_items_aux = mocker.sentinel.items_aux + self.m_items_scalar = mocker.sentinel.items_scalar category = _CategoryItems( items_dim=self.m_items_dim, items_aux=self.m_items_aux, @@ -643,40 +649,40 @@ def setUp(self): self.resolve.lhs_cube_category = category self.resolve.rhs_cube_category = category target = "iris.common.resolve.Resolve._dim_coverage" - self.m_lhs_cube_dim_coverage = sentinel.lhs_cube_dim_coverage - self.m_rhs_cube_dim_coverage = sentinel.rhs_cube_dim_coverage + self.m_lhs_cube_dim_coverage = mocker.sentinel.lhs_cube_dim_coverage + self.m_rhs_cube_dim_coverage = mocker.sentinel.rhs_cube_dim_coverage side_effect = ( self.m_lhs_cube_dim_coverage, self.m_rhs_cube_dim_coverage, ) - self.mocker_dim_coverage = self.patch(target, side_effect=side_effect) + self.mocker_dim_coverage = mocker.patch(target, side_effect=side_effect) target = "iris.common.resolve.Resolve._aux_coverage" - self.m_lhs_cube_aux_coverage = sentinel.lhs_cube_aux_coverage - self.m_rhs_cube_aux_coverage = sentinel.rhs_cube_aux_coverage + self.m_lhs_cube_aux_coverage = mocker.sentinel.lhs_cube_aux_coverage + self.m_rhs_cube_aux_coverage = mocker.sentinel.rhs_cube_aux_coverage side_effect = ( self.m_lhs_cube_aux_coverage, self.m_rhs_cube_aux_coverage, ) - self.mocker_aux_coverage = self.patch(target, side_effect=side_effect) + self.mocker_aux_coverage = mocker.patch(target, side_effect=side_effect) - def test(self): + def test(self, mocker): self.resolve._metadata_coverage() - self.assertEqual(2, self.mocker_dim_coverage.call_count) + assert self.mocker_dim_coverage.call_count == 2 calls = [ - mock.call(self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata]), - mock.call(self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata]), + mocker.call(self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata]), + mocker.call(self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata]), ] - self.assertEqual(calls, self.mocker_dim_coverage.call_args_list) - self.assertEqual(2, self.mocker_aux_coverage.call_count) + assert self.mocker_dim_coverage.call_args_list == calls + assert self.mocker_aux_coverage.call_count == 2 calls = [ - mock.call( + mocker.call( self.m_lhs_cube, self.m_items_aux, self.m_items_scalar, 
[self.m_items_aux_metadata], [self.m_items_scalar_metadata], ), - mock.call( + mocker.call( self.m_rhs_cube, self.m_items_aux, self.m_items_scalar, @@ -684,26 +690,19 @@ def test(self): [self.m_items_scalar_metadata], ), ] - self.assertEqual(calls, self.mocker_aux_coverage.call_args_list) - self.assertEqual( - self.m_lhs_cube_dim_coverage, self.resolve.lhs_cube_dim_coverage - ) - self.assertEqual( - self.m_rhs_cube_dim_coverage, self.resolve.rhs_cube_dim_coverage - ) - self.assertEqual( - self.m_lhs_cube_aux_coverage, self.resolve.lhs_cube_aux_coverage - ) - self.assertEqual( - self.m_rhs_cube_aux_coverage, self.resolve.rhs_cube_aux_coverage - ) + assert self.mocker_aux_coverage.call_args_list == calls + assert self.resolve.lhs_cube_dim_coverage == self.m_lhs_cube_dim_coverage + assert self.resolve.rhs_cube_dim_coverage == self.m_rhs_cube_dim_coverage + assert self.resolve.lhs_cube_aux_coverage == self.m_lhs_cube_aux_coverage + assert self.resolve.rhs_cube_aux_coverage == self.m_rhs_cube_aux_coverage -class Test__dim_mapping(tests.IrisTest): - def setUp(self): +class Test__dim_mapping: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.ndim = 3 Wrapper = namedtuple("Wrapper", ("name",)) - cube = Wrapper(name=lambda: sentinel.name) + cube = Wrapper(name=lambda: mocker.sentinel.name) self.src_coverage = _DimCoverage( cube=cube, metadata=[], @@ -721,17 +720,21 @@ def setUp(self): dims_free=None, ) self.metadata = [ - sentinel.metadata_0, - sentinel.metadata_1, - sentinel.metadata_2, + mocker.sentinel.metadata_0, + mocker.sentinel.metadata_1, + mocker.sentinel.metadata_2, + ] + self.dummy = [ + mocker.sentinel.dummy_0, + mocker.sentinel.dummy_1, + mocker.sentinel.dummy_2, ] - self.dummy = [sentinel.dummy_0, sentinel.dummy_1, sentinel.dummy_2] def test_no_mapping(self): self.src_coverage.metadata.extend(self.metadata) self.tgt_coverage.metadata.extend(self.dummy) result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) - self.assertEqual(dict(), result) + assert result == {} def test_full_mapping(self): self.src_coverage.metadata.extend(self.metadata) @@ -740,7 +743,7 @@ def test_full_mapping(self): self.tgt_coverage.dims_common.extend(dims_common) result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 0, 1: 1, 2: 2} - self.assertEqual(expected, result) + assert result == expected def test_transpose_mapping(self): self.src_coverage.metadata.extend(self.metadata[::-1]) @@ -749,34 +752,35 @@ def test_transpose_mapping(self): self.tgt_coverage.dims_common.extend(dims_common) result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 2, 1: 1, 2: 0} - self.assertEqual(expected, result) + assert result == expected - def test_partial_mapping__transposed(self): + def test_partial_mapping__transposed(self, mocker): self.src_coverage.metadata.extend(self.metadata) - self.metadata[1] = sentinel.nope + self.metadata[1] = mocker.sentinel.nope self.tgt_coverage.metadata.extend(self.metadata[::-1]) dims_common = [0, 2] self.tgt_coverage.dims_common.extend(dims_common) result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 2, 2: 0} - self.assertEqual(expected, result) + assert result == expected - def test_bad_metadata_mapping(self): + def test_bad_metadata_mapping(self, mocker): self.src_coverage.metadata.extend(self.metadata) - self.metadata[0] = sentinel.bad + self.metadata[0] = mocker.sentinel.bad self.tgt_coverage.metadata.extend(self.metadata) dims_common = [0] 
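(Editor's note: the hunks above and below repeatedly apply the same conversion idiom — an autouse fixture with pytest-mock's `mocker` replacing `IrisTest.setUp()`/`self.patch()`, and `pytest.raises(..., match=...)` replacing `assertRaisesRegex`. The following is a minimal, self-contained sketch of that idiom only; the class name and the `os.path.exists` patch target are hypothetical stand-ins, and it assumes pytest and pytest-mock are installed.)

    import pytest

    class Test_conversion_sketch:
        # Hypothetical test class: the autouse fixture plus pytest-mock's
        # "mocker" fixture stand in for IrisTest.setUp() and self.patch().
        @pytest.fixture(autouse=True)
        def _setup(self, mocker):
            # mocker.sentinel is unittest.mock.sentinel, so existing
            # sentinel-based attributes carry over unchanged.
            self.expected = mocker.sentinel.cube
            # os.path.exists is only a stand-in target; the real patches in
            # this diff target iris.common.resolve.Resolve methods.
            self.m_exists = mocker.patch("os.path.exists", return_value=True)

        def test_raises(self):
            # pytest.raises(..., match=...) replaces assertRaisesRegex.
            emsg = "Failed to map common dim coordinate metadata"
            with pytest.raises(ValueError, match=emsg):
                raise ValueError(emsg)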
self.tgt_coverage.dims_common.extend(dims_common) emsg = "Failed to map common dim coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) -class Test__aux_mapping(tests.IrisTest): - def setUp(self): +class Test__aux_mapping: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.ndim = 3 Wrapper = namedtuple("Wrapper", ("name",)) - cube = Wrapper(name=lambda: sentinel.name) + cube = Wrapper(name=lambda: mocker.sentinel.name) self.src_coverage = _AuxCoverage( cube=cube, common_items_aux=[], @@ -798,21 +802,33 @@ def setUp(self): dims_free=None, ) self.items = [ - _Item(metadata=sentinel.metadata0, coord=sentinel.coord0, dims=[0]), - _Item(metadata=sentinel.metadata1, coord=sentinel.coord1, dims=[1]), - _Item(metadata=sentinel.metadata2, coord=sentinel.coord2, dims=[2]), + _Item( + metadata=mocker.sentinel.metadata0, + coord=mocker.sentinel.coord0, + dims=[0], + ), + _Item( + metadata=mocker.sentinel.metadata1, + coord=mocker.sentinel.coord1, + dims=[1], + ), + _Item( + metadata=mocker.sentinel.metadata2, + coord=mocker.sentinel.coord2, + dims=[2], + ), ] def test_no_mapping(self): result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - self.assertEqual(dict(), result) + assert result == {} def test_full_mapping(self): self.src_coverage.common_items_aux.extend(self.items) self.tgt_coverage.common_items_aux.extend(self.items) result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 0, 1: 1, 2: 2} - self.assertEqual(expected, result) + assert result == expected def test_transpose_mapping(self): self.src_coverage.common_items_aux.extend(self.items) @@ -822,7 +838,7 @@ def test_transpose_mapping(self): self.tgt_coverage.common_items_aux.extend(items) result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 2, 1: 1, 2: 0} - self.assertEqual(expected, result) + assert result == expected def test_partial_mapping__transposed(self): _ = self.items.pop(1) @@ -833,7 +849,7 @@ def test_partial_mapping__transposed(self): self.tgt_coverage.common_items_aux.extend(items) result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 2, 2: 0} - self.assertEqual(expected, result) + assert result == expected def test_mapping__match_multiple_src_metadata(self): items = deepcopy(self.items) @@ -843,7 +859,7 @@ def test_mapping__match_multiple_src_metadata(self): self.tgt_coverage.common_items_aux.extend(items) result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 0, 2: 2} - self.assertEqual(expected, result) + assert result == expected def test_mapping__skip_match_multiple_src_metadata(self): items = deepcopy(self.items) @@ -853,7 +869,7 @@ def test_mapping__skip_match_multiple_src_metadata(self): self.src_coverage.common_items_aux.extend(items) result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) expected = {2: 2} - self.assertEqual(expected, result) + assert result == expected def test_mapping__skip_different_rank(self): items = deepcopy(self.items) @@ -862,57 +878,58 @@ def test_mapping__skip_different_rank(self): self.tgt_coverage.common_items_aux.extend(items) result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) expected = {0: 0, 1: 1} - self.assertEqual(expected, result) + assert result == expected - def test_bad_metadata_mapping(self): + def test_bad_metadata_mapping(self, mocker): 
self.src_coverage.common_items_aux.extend(self.items) items = deepcopy(self.items) - items[0] = items[0]._replace(metadata=sentinel.bad) + items[0] = items[0]._replace(metadata=mocker.sentinel.bad) self.tgt_coverage.common_items_aux.extend(items) emsg = "Failed to map common aux coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) -class Test_mapped(tests.IrisTest): +class Test_mapped: def test_mapping_none(self): resolve = Resolve() - self.assertIsNone(resolve.mapping) - self.assertIsNone(resolve.mapped) + assert resolve.mapping is None + assert resolve.mapped is None - def test_mapped__src_cube_lhs(self): + def test_mapped__src_cube_lhs(self, mocker): resolve = Resolve() - lhs = mock.Mock(ndim=2) - rhs = mock.Mock(ndim=3) + lhs = mocker.Mock(ndim=2) + rhs = mocker.Mock(ndim=3) resolve.lhs_cube = lhs resolve.rhs_cube = rhs resolve.map_rhs_to_lhs = False resolve.mapping = {0: 0, 1: 1} - self.assertTrue(resolve.mapped) + assert resolve.mapped - def test_mapped__src_cube_rhs(self): + def test_mapped__src_cube_rhs(self, mocker): resolve = Resolve() - lhs = mock.Mock(ndim=3) - rhs = mock.Mock(ndim=2) + lhs = mocker.Mock(ndim=3) + rhs = mocker.Mock(ndim=2) resolve.lhs_cube = lhs resolve.rhs_cube = rhs resolve.map_rhs_to_lhs = True resolve.mapping = {0: 0, 1: 1} - self.assertTrue(resolve.mapped) + assert resolve.mapped - def test_partial_mapping(self): + def test_partial_mapping(self, mocker): resolve = Resolve() - lhs = mock.Mock(ndim=3) - rhs = mock.Mock(ndim=2) + lhs = mocker.Mock(ndim=3) + rhs = mocker.Mock(ndim=2) resolve.lhs_cube = lhs resolve.rhs_cube = rhs resolve.map_rhs_to_lhs = True resolve.mapping = {0: 0} - self.assertFalse(resolve.mapped) + assert not resolve.mapped -class Test__free_mapping(tests.IrisTest): - def setUp(self): +class Test__free_mapping: + @pytest.fixture(autouse=True) + def _setup(self): self.Cube = namedtuple("Wrapper", ("name", "ndim", "shape")) self.src_dim_coverage = dict( cube=None, @@ -955,7 +972,7 @@ def test_mapping_no_dims_free(self): self.tgt_dim_coverage["cube"] = cube args = self._make_args() emsg = "Insufficient matching coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.resolve._free_mapping(**args) def _make_coverage(self, name, shape, dims_free): @@ -995,7 +1012,7 @@ def test_mapping_src_free_to_tgt_local(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_local__broadcast_src_first(self): # key: (state) c=common, f=free, l=local @@ -1021,7 +1038,7 @@ def test_mapping_src_free_to_tgt_local__broadcast_src_first(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_local__broadcast_src_last(self): # key: (state) c=common, f=free, l=local @@ -1047,7 +1064,7 @@ def test_mapping_src_free_to_tgt_local__broadcast_src_last(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_local__broadcast_src_both(self): # key: (state) c=common, f=free, l=local @@ -1073,7 
+1090,7 @@ def test_mapping_src_free_to_tgt_local__broadcast_src_both(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 1, 1: 2, 2: 3} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_free(self): # key: (state) c=common, f=free, l=local @@ -1098,7 +1115,7 @@ def test_mapping_src_free_to_tgt_free(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_free__broadcast_src_first(self): # key: (state) c=common, f=free, l=local @@ -1124,7 +1141,7 @@ def test_mapping_src_free_to_tgt_free__broadcast_src_first(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_free__broadcast_src_last(self): # key: (state) c=common, f=free, l=local @@ -1150,7 +1167,7 @@ def test_mapping_src_free_to_tgt_free__broadcast_src_last(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt_free__broadcast_src_both(self): # key: (state) c=common, f=free, l=local @@ -1176,7 +1193,7 @@ def test_mapping_src_free_to_tgt_free__broadcast_src_both(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_src_free_to_tgt__fail(self): # key: (state) c=common, f=free, l=local @@ -1201,7 +1218,7 @@ def test_mapping_src_free_to_tgt__fail(self): self.resolve.mapping = {1: 2} args = self._make_args() emsg = "Insufficient matching coordinate metadata to resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.resolve._free_mapping(**args) def test_mapping_tgt_free_to_src_local(self): @@ -1227,7 +1244,7 @@ def test_mapping_tgt_free_to_src_local(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_tgt_free_to_src_local__broadcast_tgt_first(self): # key: (state) c=common, f=free, l=local @@ -1253,7 +1270,7 @@ def test_mapping_tgt_free_to_src_local__broadcast_tgt_first(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_tgt_free_to_src_local__broadcast_tgt_last(self): # key: (state) c=common, f=free, l=local @@ -1279,7 +1296,7 @@ def test_mapping_tgt_free_to_src_local__broadcast_tgt_last(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping == expected def test_mapping_tgt_free_to_src_local__broadcast_tgt_both(self): # key: (state) c=common, f=free, l=local @@ -1305,7 +1322,7 @@ def test_mapping_tgt_free_to_src_local__broadcast_tgt_both(self): args = self._make_args() self.resolve._free_mapping(**args) expected = {0: 1, 1: 2, 2: 3} - self.assertEqual(expected, self.resolve.mapping) + assert self.resolve.mapping 
== expected def test_mapping_tgt_free_to_src_no_free__fail(self): # key: (state) c=common, f=free, l=local @@ -1330,179 +1347,189 @@ def test_mapping_tgt_free_to_src_no_free__fail(self): self.resolve.mapping = {1: 2} args = self._make_args() emsg = "Insufficient matching coordinate metadata to resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.resolve._free_mapping(**args) -class Test__src_cube(tests.IrisTest): - def setUp(self): +class Test__src_cube: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.expected = sentinel.cube + self.expected = mocker.sentinel.cube def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = True self.resolve.rhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._src_cube) + assert self.resolve._src_cube == self.expected def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = False self.resolve.lhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._src_cube) + assert self.resolve._src_cube == self.expected def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._src_cube -class Test__src_cube_position(tests.IrisTest): - def setUp(self): +class Test__src_cube_position: + @pytest.fixture(autouse=True) + def _setup(self): self.resolve = Resolve() def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = True - self.assertEqual("RHS", self.resolve._src_cube_position) + assert self.resolve._src_cube_position == "RHS" def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = False - self.assertEqual("LHS", self.resolve._src_cube_position) + assert self.resolve._src_cube_position == "LHS" def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._src_cube_position -class Test__src_cube_resolved__getter(tests.IrisTest): - def setUp(self): +class Test__src_cube_resolved__getter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.expected = sentinel.cube + self.expected = mocker.sentinel.cube def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = True self.resolve.rhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._src_cube_resolved) + assert self.resolve._src_cube_resolved == self.expected def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = False self.resolve.lhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._src_cube_resolved) + assert self.resolve._src_cube_resolved == self.expected def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._src_cube_resolved -class Test__src_cube_resolved__setter(tests.IrisTest): - def setUp(self): +class Test__src_cube_resolved__setter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.expected = sentinel.cube + self.expected = mocker.sentinel.cube def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = True self.resolve._src_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.rhs_cube_resolved) + assert self.resolve.rhs_cube_resolved == self.expected def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = False self.resolve._src_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.lhs_cube_resolved) + assert self.resolve.lhs_cube_resolved == self.expected def 
test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._src_cube_resolved = self.expected -class Test__tgt_cube(tests.IrisTest): - def setUp(self): +class Test__tgt_cube: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.expected = sentinel.cube + self.expected = mocker.sentinel.cube def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = False self.resolve.rhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube) + assert self.resolve._tgt_cube == self.expected def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = True self.resolve.lhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube) + assert self.resolve._tgt_cube == self.expected def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._tgt_cube -class Test__tgt_cube_position(tests.IrisTest): - def setUp(self): +class Test__tgt_cube_position: + @pytest.fixture(autouse=True) + def _setup(self): self.resolve = Resolve() def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = False - self.assertEqual("RHS", self.resolve._tgt_cube_position) + assert self.resolve._tgt_cube_position == "RHS" def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = True - self.assertEqual("LHS", self.resolve._tgt_cube_position) + assert self.resolve._tgt_cube_position == "LHS" def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._tgt_cube_position -class Test__tgt_cube_resolved__getter(tests.IrisTest): - def setUp(self): +class Test__tgt_cube_resolved__getter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.expected = sentinel.cube + self.expected = mocker.sentinel.cube def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = False self.resolve.rhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube_resolved) + assert self.resolve._tgt_cube_resolved == self.expected def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = True self.resolve.lhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube_resolved) + assert self.resolve._tgt_cube_resolved == self.expected def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._tgt_cube_resolved -class Test__tgt_cube_resolved__setter(tests.IrisTest): - def setUp(self): +class Test__tgt_cube_resolved__setter: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.resolve = Resolve() - self.expected = sentinel.cube + self.expected = mocker.sentinel.cube def test_rhs_cube(self): self.resolve.map_rhs_to_lhs = False self.resolve._tgt_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.rhs_cube_resolved) + assert self.resolve.rhs_cube_resolved == self.expected def test_lhs_cube(self): self.resolve.map_rhs_to_lhs = True self.resolve._tgt_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.lhs_cube_resolved) + assert self.resolve.lhs_cube_resolved == self.expected def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._tgt_cube_resolved = self.expected -class Test_shape(tests.IrisTest): - def setUp(self): +class Test_shape: + @pytest.fixture(autouse=True) + def _setup(self): self.resolve = 
Resolve() def test_no_shape(self): - self.assertIsNone(self.resolve.shape) + assert self.resolve.shape is None - def test_shape(self): - expected = sentinel.shape + def test_shape(self, mocker): + expected = mocker.sentinel.shape self.resolve._broadcast_shape = expected - self.assertEqual(expected, self.resolve.shape) + assert self.resolve.shape == expected -class Test__as_compatible_cubes(tests.IrisTest): - def setUp(self): +class Test__as_compatible_cubes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.Cube = namedtuple( "Wrapper", ( @@ -1520,7 +1547,7 @@ def setUp(self): self.resolve = Resolve() self.resolve.map_rhs_to_lhs = True self.resolve.mapping = {} - self.mocker = self.patch("iris.cube.Cube") + self.mocker = mocker.patch("iris.cube.Cube") self.args = dict( name=None, ndim=None, @@ -1539,8 +1566,8 @@ def _make_cube(self, name, shape, transpose_shape=None): self.args["ndim"] = ndim self.args["shape"] = shape if name == "src": - self.args["metadata"] = sentinel.metadata - self.reshape = sentinel.reshape + self.args["metadata"] = mock.sentinel.metadata + self.reshape = mock.sentinel.reshape m_reshape = mock.Mock(return_value=self.reshape) self.transpose = mock.Mock(shape=transpose_shape, reshape=m_reshape) m_transpose = mock.Mock(return_value=self.transpose) @@ -1549,9 +1576,9 @@ def _make_cube(self, name, shape, transpose_shape=None): m_core_data = mock.Mock(copy=m_copy) self.args["core_data"] = mock.Mock(return_value=m_core_data) self.args["coord_dims"] = mock.Mock(side_effect=([0], [ndim - 1])) - self.dim_coord = sentinel.dim_coord - self.aux_coord = sentinel.aux_coord - self.aux_factory = sentinel.aux_factory + self.dim_coord = mock.sentinel.dim_coord + self.aux_coord = mock.sentinel.aux_coord + self.aux_factory = mock.sentinel.aux_factory self.args["dim_coords"] = [self.dim_coord] self.args["aux_coords"] = [self.aux_coord] self.args["aux_factories"] = [self.aux_factory] @@ -1568,7 +1595,7 @@ def test_incomplete_src_to_tgt_mapping__fail(self): self._make_cube("src", src_shape) tgt_shape = (3, 4) self._make_cube("tgt", tgt_shape) - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): self.resolve._as_compatible_cubes() def test_incompatible_shapes__fail(self): @@ -1588,7 +1615,7 @@ def test_incompatible_shapes__fail(self): self._make_cube("tgt", tgt_shape) self.resolve.mapping = {0: 1, 1: 2, 2: 3} emsg = "Cannot resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.resolve._as_compatible_cubes() def test_incompatible_shapes__fail_broadcast(self): @@ -1608,37 +1635,32 @@ def test_incompatible_shapes__fail_broadcast(self): self._make_cube("tgt", tgt_shape) self.resolve.mapping = {0: 3, 1: 2, 2: 1} emsg = "Cannot resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): self.resolve._as_compatible_cubes() def _check_compatible(self, broadcast_shape): - self.assertEqual(self.resolve.lhs_cube, self.resolve._tgt_cube_resolved) - self.assertEqual(self.cube, self.resolve._src_cube_resolved) - self.assertEqual(broadcast_shape, self.resolve._broadcast_shape) - self.assertEqual(1, self.mocker.call_count) - self.assertEqual(self.args["metadata"], self.cube.metadata) - self.assertEqual(2, self.resolve.rhs_cube.coord_dims.call_count) - self.assertEqual( - [mock.call(self.dim_coord), mock.call(self.aux_coord)], - self.resolve.rhs_cube.coord_dims.call_args_list, - ) - self.assertEqual(1, self.cube.add_dim_coord.call_count) - self.assertEqual( 
- [mock.call(self.dim_coord, [self.resolve.mapping[0]])], - self.cube.add_dim_coord.call_args_list, - ) - self.assertEqual(1, self.cube.add_aux_coord.call_count) - self.assertEqual( - [mock.call(self.aux_coord, [self.resolve.mapping[2]])], - self.cube.add_aux_coord.call_args_list, - ) - self.assertEqual(1, self.cube.add_aux_factory.call_count) - self.assertEqual( - [mock.call(self.aux_factory)], - self.cube.add_aux_factory.call_args_list, - ) - - def test_compatible(self): + assert self.resolve.lhs_cube == self.resolve._tgt_cube_resolved + assert self.resolve._src_cube_resolved == self.cube + assert self.resolve._broadcast_shape == broadcast_shape + assert self.mocker.call_count == 1 + assert self.args["metadata"] == self.cube.metadata + assert self.resolve.rhs_cube.coord_dims.call_count == 2 + assert self.resolve.rhs_cube.coord_dims.call_args_list == [ + mock.call(self.dim_coord), + mock.call(self.aux_coord), + ] + assert self.cube.add_dim_coord.call_count == 1 + assert self.cube.add_dim_coord.call_args_list == [ + mock.call(self.dim_coord, [self.resolve.mapping[0]]) + ] + assert self.cube.add_aux_coord.call_count == 1 + assert self.cube.add_aux_coord.call_args_list == [ + mock.call(self.aux_coord, [self.resolve.mapping[2]]) + ] + assert self.cube.add_aux_factory.call_count == 1 + assert self.cube.add_aux_factory.call_args_list == [mock.call(self.aux_factory)] + + def test_compatible(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1658,9 +1680,9 @@ def test_compatible(self): self.resolve.mapping = mapping self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual([mock.call(self.data)], self.mocker.call_args_list) + assert self.mocker.call_args_list == [mocker.call(self.data)] - def test_compatible__transpose(self): + def test_compatible__transpose(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1680,11 +1702,11 @@ def test_compatible__transpose(self): self.resolve.mapping = mapping self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual([mock.call([2, 1, 0])], self.data.transpose.call_args_list) - self.assertEqual([mock.call(self.transpose)], self.mocker.call_args_list) + assert self.data.transpose.call_count == 1 + assert self.data.transpose.call_args_list == [mocker.call([2, 1, 0])] + assert self.mocker.call_args_list == [mocker.call(self.transpose)] - def test_compatible__reshape(self): + def test_compatible__reshape(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1704,13 +1726,11 @@ def test_compatible__reshape(self): self.resolve.mapping = mapping self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual(1, self.data.reshape.call_count) - self.assertEqual( - [mock.call((1,) + src_shape)], self.data.reshape.call_args_list - ) - self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) + assert self.data.reshape.call_count == 1 + assert self.data.reshape.call_args_list == [mocker.call((1,) + src_shape)] + assert self.mocker.call_args_list == [mocker.call(self.reshape)] - def test_compatible__transpose_reshape(self): + def test_compatible__transpose_reshape(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1731,16 +1751,13 @@ def test_compatible__transpose_reshape(self): self.resolve.mapping = mapping self.resolve._as_compatible_cubes() 
self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual([mock.call([2, 1, 0])], self.data.transpose.call_args_list) - self.assertEqual(1, self.data.reshape.call_count) - self.assertEqual( - [mock.call((1,) + transpose_shape)], - self.data.reshape.call_args_list, - ) - self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) + assert self.data.transpose.call_count == 1 + assert self.data.transpose.call_args_list == [mocker.call([2, 1, 0])] + assert self.data.reshape.call_count == 1 + assert self.data.reshape.call_args_list == [mocker.call((1,) + transpose_shape)] + assert self.mocker.call_args_list == [mocker.call(self.reshape)] - def test_compatible__broadcast(self): + def test_compatible__broadcast(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1761,9 +1778,9 @@ def test_compatible__broadcast(self): self.resolve.mapping = mapping self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=(4, 3, 2)) - self.assertEqual([mock.call(self.data)], self.mocker.call_args_list) + assert self.mocker.call_args_list == [mocker.call(self.data)] - def test_compatible__broadcast_transpose_reshape(self): + def test_compatible__broadcast_transpose_reshape(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1785,25 +1802,23 @@ def test_compatible__broadcast_transpose_reshape(self): self.resolve.mapping = mapping self.resolve._as_compatible_cubes() self._check_compatible(broadcast_shape=(5, 4, 3, 2)) - self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual([mock.call([2, 1, 0])], self.data.transpose.call_args_list) - self.assertEqual(1, self.data.reshape.call_count) - self.assertEqual( - [mock.call((1,) + transpose_shape)], - self.data.reshape.call_args_list, - ) - self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) - - -class Test__metadata_mapping(tests.IrisTest): - def setUp(self): - self.ndim = sentinel.ndim - self.src_cube = mock.Mock(ndim=self.ndim) - self.src_dim_coverage = mock.Mock(dims_free=[]) - self.src_aux_coverage = mock.Mock(dims_free=[]) - self.tgt_cube = mock.Mock(ndim=self.ndim) - self.tgt_dim_coverage = mock.Mock(dims_free=[]) - self.tgt_aux_coverage = mock.Mock(dims_free=[]) + assert self.data.transpose.call_count == 1 + assert self.data.transpose.call_args_list == [mocker.call([2, 1, 0])] + assert self.data.reshape.call_count == 1 + assert self.data.reshape.call_args_list == [mocker.call((1,) + transpose_shape)] + assert self.mocker.call_args_list == [mocker.call(self.reshape)] + + +class Test__metadata_mapping: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.ndim = mocker.sentinel.ndim + self.src_cube = mocker.Mock(ndim=self.ndim) + self.src_dim_coverage = mocker.Mock(dims_free=[]) + self.src_aux_coverage = mocker.Mock(dims_free=[]) + self.tgt_cube = mocker.Mock(ndim=self.ndim) + self.tgt_dim_coverage = mocker.Mock(dims_free=[]) + self.tgt_aux_coverage = mocker.Mock(dims_free=[]) self.resolve = Resolve() self.map_rhs_to_lhs = True self.resolve.map_rhs_to_lhs = self.map_rhs_to_lhs @@ -1814,23 +1829,23 @@ def setUp(self): self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage self.resolve.mapping = {} - self.shape = sentinel.shape + self.shape = mocker.sentinel.shape self.resolve._broadcast_shape = self.shape - self.resolve._src_cube_resolved = mock.Mock(shape=self.shape) - self.resolve._tgt_cube_resolved = 
mock.Mock(shape=self.shape) - self.m_dim_mapping = self.patch( + self.resolve._src_cube_resolved = mocker.Mock(shape=self.shape) + self.resolve._tgt_cube_resolved = mocker.Mock(shape=self.shape) + self.m_dim_mapping = mocker.patch( "iris.common.resolve.Resolve._dim_mapping", return_value={} ) - self.m_aux_mapping = self.patch( + self.m_aux_mapping = mocker.patch( "iris.common.resolve.Resolve._aux_mapping", return_value={} ) - self.m_free_mapping = self.patch("iris.common.resolve.Resolve._free_mapping") - self.m_as_compatible_cubes = self.patch( + self.m_free_mapping = mocker.patch("iris.common.resolve.Resolve._free_mapping") + self.m_as_compatible_cubes = mocker.patch( "iris.common.resolve.Resolve._as_compatible_cubes" ) self.mapping = {0: 1, 1: 2, 2: 3} - def test_mapped__dim_coords(self): + def test_mapped__dim_coords(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1845,15 +1860,15 @@ def test_mapped__dim_coords(self): self.src_cube.ndim = 3 self.m_dim_mapping.return_value = self.mapping self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(0, self.m_aux_mapping.call_count) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def test_mapped__aux_coords(self): + assert self.resolve.mapping == self.mapping + assert self.m_dim_mapping.call_count == 1 + expected = [mocker.call(self.src_dim_coverage, self.tgt_dim_coverage)] + assert self.m_dim_mapping.call_args_list == expected + assert self.m_aux_mapping.call_count == 0 + assert self.m_free_mapping.call_count == 0 + assert self.m_as_compatible_cubes.call_count == 1 + + def test_mapped__aux_coords(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1868,17 +1883,17 @@ def test_mapped__aux_coords(self): self.src_cube.ndim = 3 self.m_aux_mapping.return_value = self.mapping self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def test_mapped__dim_and_aux_coords(self): + assert self.resolve.mapping == self.mapping + assert self.m_dim_mapping.call_count == 1 + expected = [mocker.call(self.src_dim_coverage, self.tgt_dim_coverage)] + assert self.m_dim_mapping.call_args_list == expected + assert self.m_aux_mapping.call_count == 1 + expected = [mocker.call(self.src_aux_coverage, self.tgt_aux_coverage)] + assert self.m_aux_mapping.call_args_list == expected + assert self.m_free_mapping.call_count == 0 + assert self.m_as_compatible_cubes.call_count == 1 + + def test_mapped__dim_and_aux_coords(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1896,17 +1911,17 @@ def test_mapped__dim_and_aux_coords(self): self.m_dim_mapping.return_value = dim_mapping self.m_aux_mapping.return_value = aux_mapping self.resolve._metadata_mapping() - 
self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def test_mapped__dim_coords_and_free_dims(self): + assert self.resolve.mapping == self.mapping + assert self.m_dim_mapping.call_count == 1 + expected = [mocker.call(self.src_dim_coverage, self.tgt_dim_coverage)] + assert self.m_dim_mapping.call_args_list == expected + assert self.m_aux_mapping.call_count == 1 + expected = [mocker.call(self.src_aux_coverage, self.tgt_aux_coverage)] + assert self.m_aux_mapping.call_args_list == expected + assert self.m_free_mapping.call_count == 0 + assert self.m_as_compatible_cubes.call_count == 1 + + def test_mapped__dim_coords_and_free_dims(self, mocker): # key: (state) c=common, f=free, l=local # (coord) a=aux, d=dim # @@ -1925,26 +1940,26 @@ def test_mapped__dim_coords_and_free_dims(self): side_effect = lambda a, b, c, d: self.resolve.mapping.update(free_mapping) self.m_free_mapping.side_effect = side_effect self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(1, self.m_free_mapping.call_count) + assert self.resolve.mapping == self.mapping + assert self.m_dim_mapping.call_count == 1 + expected = [mocker.call(self.src_dim_coverage, self.tgt_dim_coverage)] + assert self.m_dim_mapping.call_args_list == expected + assert self.m_aux_mapping.call_count == 1 + expected = [mocker.call(self.src_aux_coverage, self.tgt_aux_coverage)] + assert self.m_aux_mapping.call_args_list == expected + assert self.m_free_mapping.call_count == 1 expected = [ - mock.call( + mocker.call( self.src_dim_coverage, self.tgt_dim_coverage, self.src_aux_coverage, self.tgt_aux_coverage, ) ] - self.assertEqual(expected, self.m_free_mapping.call_args_list) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) + assert self.m_free_mapping.call_args_list == expected + assert self.m_as_compatible_cubes.call_count == 1 - def test_mapped__dim_coords_with_broadcast_flip(self): + def test_mapped__dim_coords_with_broadcast_flip(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -1965,16 +1980,16 @@ def test_mapped__dim_coords_with_broadcast_flip(self): self.resolve._src_cube_resolved.shape = broadcast_shape self.resolve._tgt_cube_resolved.shape = (1, 4, 3, 2) self.resolve._metadata_mapping() - self.assertEqual(mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(0, self.m_aux_mapping.call_count) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(2, self.m_as_compatible_cubes.call_count) - 
self.assertEqual(not self.map_rhs_to_lhs, self.resolve.map_rhs_to_lhs) - - def test_mapped__dim_coords_free_flip_with_free_flip(self): + assert self.resolve.mapping == mapping + assert self.m_dim_mapping.call_count == 1 + expected = [mocker.call(self.src_dim_coverage, self.tgt_dim_coverage)] + assert self.m_dim_mapping.call_args_list == expected + assert self.m_aux_mapping.call_count == 0 + assert self.m_free_mapping.call_count == 0 + assert self.m_as_compatible_cubes.call_count == 2 + assert self.resolve.map_rhs_to_lhs != self.map_rhs_to_lhs + + def test_mapped__dim_coords_free_flip_with_free_flip(self, mocker): # key: (state) c=common, f=free, l=local # (coord) a=aux, d=dim # @@ -1997,28 +2012,29 @@ def test_mapped__dim_coords_free_flip_with_free_flip(self): self.tgt_dim_coverage.dims_free = [0, 1] self.tgt_aux_coverage.dims_free = [0, 1] self.resolve._metadata_mapping() - self.assertEqual(mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(1, self.m_free_mapping.call_count) + assert self.resolve.mapping == mapping + assert self.m_dim_mapping.call_count == 1 + expected = [mocker.call(self.src_dim_coverage, self.tgt_dim_coverage)] + assert self.m_dim_mapping.call_args_list == expected + assert self.m_aux_mapping.call_count == 1 + expected = [mocker.call(self.src_aux_coverage, self.tgt_aux_coverage)] + assert self.m_aux_mapping.call_args_list == expected + assert self.m_free_mapping.call_count == 1 expected = [ - mock.call( + mocker.call( self.src_dim_coverage, self.tgt_dim_coverage, self.src_aux_coverage, self.tgt_aux_coverage, ) ] - self.assertEqual(expected, self.m_free_mapping.call_args_list) - self.assertEqual(2, self.m_as_compatible_cubes.call_count) + assert self.m_free_mapping.call_args_list == expected + assert self.m_as_compatible_cubes.call_count == 2 -class Test__prepare_common_dim_payload(tests.IrisTest): - def setUp(self): +class Test__prepare_common_dim_payload: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -2031,26 +2047,30 @@ def setUp(self): # src-to-tgt mapping: # 0->1, 1->2, 2->3 self.points = ( - sentinel.points_0, - sentinel.points_1, - sentinel.points_2, - sentinel.points_3, + mocker.sentinel.points_0, + mocker.sentinel.points_1, + mocker.sentinel.points_2, + mocker.sentinel.points_3, + ) + self.bounds = ( + mocker.sentinel.bounds_0, + mocker.sentinel.bounds_1, + mocker.sentinel.bounds_2, ) - self.bounds = sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2 self.pb_0 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[0])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), + mocker.Mock(copy=mocker.Mock(return_value=self.points[0])), + mocker.Mock(copy=mocker.Mock(return_value=self.bounds[0])), ) self.pb_1 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[1])), + mocker.Mock(copy=mocker.Mock(return_value=self.points[1])), None, ) self.pb_2 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[2])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[2])), + mocker.Mock(copy=mocker.Mock(return_value=self.points[2])), + mocker.Mock(copy=mocker.Mock(return_value=self.bounds[2])), ) side_effect = (self.pb_0, 
self.pb_1, self.pb_2) - self.m_prepare_points_and_bounds = self.patch( + self.m_prepare_points_and_bounds = mocker.patch( "iris.common.resolve.Resolve._prepare_points_and_bounds", side_effect=side_effect, ) @@ -2061,12 +2081,12 @@ def setUp(self): self.mapping = {0: 1, 1: 2, 2: 3} self.resolve.mapping = self.mapping self.metadata_combined = ( - sentinel.combined_0, - sentinel.combined_1, - sentinel.combined_2, + mocker.sentinel.combined_0, + mocker.sentinel.combined_1, + mocker.sentinel.combined_2, ) - self.src_metadata = mock.Mock( - combine=mock.Mock(side_effect=self.metadata_combined) + self.src_metadata = mocker.Mock( + combine=mocker.Mock(side_effect=self.metadata_combined) ) metadata = [self.src_metadata] * len(self.mapping) self.src_coords = [ @@ -2074,9 +2094,9 @@ def setUp(self): # be of a class which is not-a-MeshCoord. # NOTE: strictly, bounds should =above values, and support .copy(). # For these tests, just omitting them works + is simpler. - Mock(spec=DimCoord, points=self.points[0], bounds=None), - Mock(spec=DimCoord, points=self.points[1], bounds=None), - Mock(spec=DimCoord, points=self.points[2], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[0], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[1], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[2], bounds=None), ] self.src_dims_common = [0, 1, 2] self.container = DimCoord @@ -2089,20 +2109,20 @@ def setUp(self): dims_free=[], ) self.tgt_metadata = [ - sentinel.tgt_metadata_0, - sentinel.tgt_metadata_1, - sentinel.tgt_metadata_2, - sentinel.tgt_metadata_3, + mocker.sentinel.tgt_metadata_0, + mocker.sentinel.tgt_metadata_1, + mocker.sentinel.tgt_metadata_2, + mocker.sentinel.tgt_metadata_3, ] self.tgt_coords = [ # N.B. these need to mimic a Coord with points and bounds, and # be of a class which is not-a-MeshCoord. # NOTE: strictly, bounds should =above values, and support .copy(). # For these tests, just omitting them works + is simpler. 
- Mock(spec=DimCoord, points=self.points[0], bounds=None), - Mock(spec=DimCoord, points=self.points[1], bounds=None), - Mock(spec=DimCoord, points=self.points[2], bounds=None), - Mock(spec=DimCoord, points=self.points[3], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[0], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[1], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[2], bounds=None), + mocker.Mock(spec=DimCoord, points=self.points[3], bounds=None), ] self.tgt_dims_common = [1, 2, 3] self.tgt_dim_coverage = _DimCoverage( @@ -2122,10 +2142,10 @@ def _check(self, ignore_mismatch=None, bad_points=None): self.tgt_dim_coverage, ignore_mismatch=ignore_mismatch, ) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_aux) == 0 + assert len(self.resolve.prepared_category.items_scalar) == 0 if not bad_points: - self.assertEqual(3, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 3 expected = [ _PreparedItem( metadata=_PreparedMetadata( @@ -2161,10 +2181,10 @@ def _check(self, ignore_mismatch=None, bad_points=None): container=self.container, ), ] - self.assertEqual(expected, self.resolve.prepared_category.items_dim) + assert self.resolve.prepared_category.items_dim == expected else: - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) + assert len(self.resolve.prepared_category.items_dim) == 0 + assert self.m_prepare_points_and_bounds.call_count == 3 if ignore_mismatch is None: ignore_mismatch = False expected = [ @@ -2190,11 +2210,11 @@ def _check(self, ignore_mismatch=None, bad_points=None): ignore_mismatch=ignore_mismatch, ), ] - self.assertEqual(expected, self.m_prepare_points_and_bounds.call_args_list) + assert self.m_prepare_points_and_bounds.call_args_list == expected if not bad_points: - self.assertEqual(3, self.src_metadata.combine.call_count) + assert self.src_metadata.combine.call_count == 3 expected = [mock.call(metadata) for metadata in self.tgt_metadata[1:]] - self.assertEqual(expected, self.src_metadata.combine.call_args_list) + assert self.src_metadata.combine.call_args_list == expected def test__default_ignore_mismatch(self): self._check() @@ -2211,8 +2231,9 @@ def test__bad_points(self): self._check(bad_points=True) -class Test__prepare_common_aux_payload(tests.IrisTest): - def setUp(self): +class Test__prepare_common_aux_payload: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # key: (state) c=common, f=free # (coord) a=aux, d=dim # @@ -2225,26 +2246,30 @@ def setUp(self): # src-to-tgt mapping: # 0->1, 1->2, 2->3 self.points = ( - sentinel.points_0, - sentinel.points_1, - sentinel.points_2, - sentinel.points_3, + mocker.sentinel.points_0, + mocker.sentinel.points_1, + mocker.sentinel.points_2, + mocker.sentinel.points_3, + ) + self.bounds = ( + mocker.sentinel.bounds_0, + mocker.sentinel.bounds_1, + mocker.sentinel.bounds_2, ) - self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) self.pb_0 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[0])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), + mocker.Mock(copy=mocker.Mock(return_value=self.points[0])), + mocker.Mock(copy=mocker.Mock(return_value=self.bounds[0])), ) self.pb_1 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[1])), + 
mocker.Mock(copy=mocker.Mock(return_value=self.points[1])), None, ) self.pb_2 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[2])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[2])), + mocker.Mock(copy=mocker.Mock(return_value=self.points[2])), + mocker.Mock(copy=mocker.Mock(return_value=self.bounds[2])), ) side_effect = (self.pb_0, self.pb_1, self.pb_2) - self.m_prepare_points_and_bounds = self.patch( + self.m_prepare_points_and_bounds = mocker.patch( "iris.common.resolve.Resolve._prepare_points_and_bounds", side_effect=side_effect, ) @@ -2256,14 +2281,14 @@ def setUp(self): self.resolve.mapping = self.mapping self.resolve.map_rhs_to_lhs = True self.metadata_combined = ( - sentinel.combined_0, - sentinel.combined_1, - sentinel.combined_2, + mocker.sentinel.combined_0, + mocker.sentinel.combined_1, + mocker.sentinel.combined_2, ) self.src_metadata = [ - mock.Mock(combine=mock.Mock(return_value=self.metadata_combined[0])), - mock.Mock(combine=mock.Mock(return_value=self.metadata_combined[1])), - mock.Mock(combine=mock.Mock(return_value=self.metadata_combined[2])), + mocker.Mock(combine=mocker.Mock(return_value=self.metadata_combined[0])), + mocker.Mock(combine=mocker.Mock(return_value=self.metadata_combined[1])), + mocker.Mock(combine=mocker.Mock(return_value=self.metadata_combined[2])), ] self.src_coords = [ # N.B. these need to mimic a Coord with points and bounds, but also @@ -2279,7 +2304,7 @@ def setUp(self): _Item(*item) for item in zip(self.src_metadata, self.src_coords, self.src_dims) ] - self.tgt_metadata = [sentinel.tgt_metadata_0] + self.src_metadata + self.tgt_metadata = [mocker.sentinel.tgt_metadata_0] + self.src_metadata self.tgt_coords = [ # N.B. these need to mimic a Coord with points and bounds, but also # the type() defines the 'container' property of a prepared item. 
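(Editor's note: the `_prepare_points_and_bounds` hunks above rely on `mocker.patch(..., side_effect=...)` returning one prepared value per call, then asserting on `call_count` and `call_args_list` with `mocker.call`. The sketch below illustrates just that mechanism; the class name and the `os.path.basename` target are hypothetical stand-ins for the iris.common.resolve targets patched in the diff, assuming pytest and pytest-mock are installed.)

    import os.path

    import pytest

    class Test_side_effect_sketch:
        # Hypothetical class illustrating mocker.patch with a side_effect
        # iterable plus call-tracking assertions, as used in the hunks above.
        @pytest.fixture(autouse=True)
        def _setup(self, mocker):
            self.returns = (mocker.sentinel.first, mocker.sentinel.second)
            self.m_basename = mocker.patch(
                "os.path.basename", side_effect=self.returns
            )

        def test_calls_recorded(self, mocker):
            # Each call consumes the next side_effect item, and mocker.call
            # (an alias of unittest.mock.call) rebuilds the expected call list.
            assert os.path.basename("a") is mocker.sentinel.first
            assert os.path.basename("b") is mocker.sentinel.second
            assert self.m_basename.call_count == 2
            assert self.m_basename.call_args_list == [
                mocker.call("a"),
                mocker.call("b"),
            ]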
@@ -2308,7 +2333,7 @@ def _check(self, ignore_mismatch=None, bad_points=None): ignore_mismatch=ignore_mismatch, ) if not bad_points: - self.assertEqual(3, len(prepared_items)) + assert len(prepared_items) == 3 expected = [ _PreparedItem( metadata=_PreparedMetadata( @@ -2344,10 +2369,10 @@ def _check(self, ignore_mismatch=None, bad_points=None): container=self.container, ), ] - self.assertEqual(expected, prepared_items) + assert prepared_items == expected else: - self.assertEqual(0, len(prepared_items)) - self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) + assert len(prepared_items) == 0 + assert self.m_prepare_points_and_bounds.call_count == 3 if ignore_mismatch is None: ignore_mismatch = False expected = [ @@ -2373,14 +2398,14 @@ def _check(self, ignore_mismatch=None, bad_points=None): ignore_mismatch=ignore_mismatch, ), ] - self.assertEqual(expected, self.m_prepare_points_and_bounds.call_args_list) + assert self.m_prepare_points_and_bounds.call_args_list == expected if not bad_points: for src_metadata, tgt_metadata in zip( self.src_metadata, self.tgt_metadata[1:] ): - self.assertEqual(1, src_metadata.combine.call_count) + assert src_metadata.combine.call_count == 1 expected = [mock.call(tgt_metadata)] - self.assertEqual(expected, src_metadata.combine.call_args_list) + assert src_metadata.combine.call_args_list == expected def test__default_ignore_mismatch(self): self._check() @@ -2403,7 +2428,7 @@ def test__no_tgt_metadata_match(self): self.resolve._prepare_common_aux_payload( self.src_common_items, tgt_common_items, prepared_items ) - self.assertEqual(0, len(prepared_items)) + assert len(prepared_items) == 0 def test__multi_tgt_metadata_match(self): item = self.tgt_common_items[1] @@ -2412,11 +2437,12 @@ def test__multi_tgt_metadata_match(self): self.resolve._prepare_common_aux_payload( self.src_common_items, tgt_common_items, prepared_items ) - self.assertEqual(0, len(prepared_items)) + assert len(prepared_items) == 0 -class Test__prepare_points_and_bounds(tests.IrisTest): - def setUp(self): +class Test__prepare_points_and_bounds: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.Coord = namedtuple( "Coord", [ @@ -2432,10 +2458,10 @@ def setUp(self): self.Cube = namedtuple("Cube", ["name", "shape"]) self.resolve = Resolve() self.resolve.map_rhs_to_lhs = True - self.src_name = sentinel.src_name - self.src_points = sentinel.src_points - self.src_bounds = sentinel.src_bounds - self.src_metadata = sentinel.src_metadata + self.src_name = mocker.sentinel.src_name + self.src_points = mocker.sentinel.src_points + self.src_bounds = mocker.sentinel.src_bounds + self.src_metadata = mocker.sentinel.src_metadata self.src_items = dict( name=lambda: self.src_name, points=self.src_points, @@ -2445,10 +2471,10 @@ def setUp(self): shape=None, has_bounds=None, ) - self.tgt_name = sentinel.tgt_name - self.tgt_points = sentinel.tgt_points - self.tgt_bounds = sentinel.tgt_bounds - self.tgt_metadata = sentinel.tgt_metadata + self.tgt_name = mocker.sentinel.tgt_name + self.tgt_points = mocker.sentinel.tgt_points + self.tgt_bounds = mocker.sentinel.tgt_bounds + self.tgt_metadata = mocker.sentinel.tgt_metadata self.tgt_items = dict( name=lambda: self.tgt_name, points=self.tgt_points, @@ -2458,7 +2484,7 @@ def setUp(self): shape=None, has_bounds=None, ) - self.m_array_equal = self.patch( + self.m_array_equal = mocker.patch( "iris.util.array_equal", side_effect=(True, True) ) @@ -2470,8 +2496,8 @@ def test_coord_ndim_unequal__tgt_ndim_greater(self): points, bounds = 
self.resolve._prepare_points_and_bounds( src_coord, tgt_coord, src_dims=None, tgt_dims=None ) - self.assertEqual(self.tgt_points, points) - self.assertEqual(self.tgt_bounds, bounds) + assert points == self.tgt_points + assert bounds == self.tgt_bounds def test_coord_ndim_unequal__src_ndim_greater(self): self.src_items["ndim"] = 10 @@ -2481,8 +2507,8 @@ def test_coord_ndim_unequal__src_ndim_greater(self): points, bounds = self.resolve._prepare_points_and_bounds( src_coord, tgt_coord, src_dims=None, tgt_dims=None ) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) + assert points == self.src_points + assert bounds == self.src_bounds def test_coord_ndim_equal__shape_unequal_with_src_broadcasting(self): # key: (state) c=common, f=free @@ -2517,8 +2543,8 @@ def test_coord_ndim_equal__shape_unequal_with_src_broadcasting(self): points, bounds = self.resolve._prepare_points_and_bounds( src_coord, tgt_coord, src_dims, tgt_dims ) - self.assertEqual(self.tgt_points, points) - self.assertEqual(self.tgt_bounds, bounds) + assert points == self.tgt_points + assert bounds == self.tgt_bounds def test_coord_ndim_equal__shape_unequal_with_tgt_broadcasting(self): # key: (state) c=common, f=free @@ -2553,11 +2579,12 @@ def test_coord_ndim_equal__shape_unequal_with_tgt_broadcasting(self): points, bounds = self.resolve._prepare_points_and_bounds( src_coord, tgt_coord, src_dims, tgt_dims ) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) + assert points == self.src_points + assert bounds == self.src_bounds def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting( self, + mocker, ): # key: (state) c=common, f=free # (coord) x=coord @@ -2579,7 +2606,7 @@ def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting( src_shape = (9, 1) src_dims = tuple(mapping.keys()) self.resolve.rhs_cube = self.Cube( - name=lambda: sentinel.src_cube, shape=src_shape + name=lambda: mocker.sentinel.src_cube, shape=src_shape ) self.src_items["ndim"] = ndim self.src_items["shape"] = src_shape @@ -2587,13 +2614,13 @@ def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting( tgt_shape = (1, 9) tgt_dims = tuple(mapping.values()) self.resolve.lhs_cube = self.Cube( - name=lambda: sentinel.tgt_cube, shape=tgt_shape + name=lambda: mocker.sentinel.tgt_cube, shape=tgt_shape ) self.tgt_items["ndim"] = ndim self.tgt_items["shape"] = tgt_shape tgt_coord = self.Coord(**self.tgt_items) emsg = "Cannot broadcast" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds( src_coord, tgt_coord, src_dims, tgt_dims ) @@ -2614,8 +2641,12 @@ def _populate(self, src_points, tgt_points, src_bounds=None, tgt_bounds=None): mapping = {0: 0, 1: 1} self.resolve.mapping = mapping self.resolve.map_rhs_to_lhs = True - self.resolve.rhs_cube = self.Cube(name=lambda: sentinel.src_cube, shape=None) - self.resolve.lhs_cube = self.Cube(name=lambda: sentinel.tgt_cube, shape=None) + self.resolve.rhs_cube = self.Cube( + name=lambda: mock.sentinel.src_cube, shape=None + ) + self.resolve.lhs_cube = self.Cube( + name=lambda: mock.sentinel.tgt_cube, shape=None + ) ndim = 1 src_dims = 1 self.src_items["ndim"] = ndim @@ -2639,40 +2670,42 @@ def _populate(self, src_points, tgt_points, src_bounds=None, tgt_bounds=None): ) return args - def test_coord_ndim_and_shape_equal__points_equal_with_no_bounds(self): + def test_coord_ndim_and_shape_equal__points_equal_with_no_bounds(self, mocker): args 
= self._populate(self.src_points, self.src_points) points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertIsNone(bounds) - self.assertEqual(1, self.m_array_equal.call_count) - expected = [mock.call(self.src_points, self.src_points, withnans=True)] - self.assertEqual(expected, self.m_array_equal.call_args_list) + assert points == self.src_points + assert bounds is None + assert self.m_array_equal.call_count == 1 + expected = [mocker.call(self.src_points, self.src_points, withnans=True)] + assert self.m_array_equal.call_args_list == expected def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only( self, + mocker, ): args = self._populate( self.src_points, self.src_points, src_bounds=self.src_bounds ) points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) - self.assertEqual(1, self.m_array_equal.call_count) - expected = [mock.call(self.src_points, self.src_points, withnans=True)] - self.assertEqual(expected, self.m_array_equal.call_args_list) + assert points == self.src_points + assert bounds == self.src_bounds + assert self.m_array_equal.call_count == 1 + expected = [mocker.call(self.src_points, self.src_points, withnans=True)] + assert self.m_array_equal.call_args_list == expected def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only( self, + mocker, ): args = self._populate( self.src_points, self.src_points, tgt_bounds=self.tgt_bounds ) points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertEqual(self.tgt_bounds, bounds) - self.assertEqual(1, self.m_array_equal.call_count) - expected = [mock.call(self.src_points, self.src_points, withnans=True)] - self.assertEqual(expected, self.m_array_equal.call_args_list) + assert points == self.src_points + assert bounds == self.tgt_bounds + assert self.m_array_equal.call_count == 1 + expected = [mocker.call(self.src_points, self.src_points, withnans=True)] + assert self.m_array_equal.call_args_list == expected def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only_strict( self, @@ -2682,7 +2715,7 @@ def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only_strict( ) with LENIENT.context(maths=False): emsg = f"Coordinate {self.src_name} has bounds" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds(**args) def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only_strict( @@ -2693,10 +2726,10 @@ def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only_strict( ) with LENIENT.context(maths=False): emsg = f"Coordinate {self.tgt_name} has bounds" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds(**args) - def test_coord_ndim_and_shape_equal__points_equal_with_bounds_equal(self): + def test_coord_ndim_and_shape_equal__points_equal_with_bounds_equal(self, mocker): args = self._populate( self.src_points, self.src_points, @@ -2704,14 +2737,14 @@ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_equal(self): tgt_bounds=self.src_bounds, ) points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) - self.assertEqual(2, self.m_array_equal.call_count) + assert points == 
self.src_points + assert bounds == self.src_bounds + assert self.m_array_equal.call_count == 2 expected = [ - mock.call(self.src_points, self.src_points, withnans=True), - mock.call(self.src_bounds, self.src_bounds, withnans=True), + mocker.call(self.src_points, self.src_points, withnans=True), + mocker.call(self.src_bounds, self.src_bounds, withnans=True), ] - self.assertEqual(expected, self.m_array_equal.call_args_list) + assert self.m_array_equal.call_args_list == expected def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different( self, @@ -2724,11 +2757,12 @@ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different( tgt_bounds=self.tgt_bounds, ) emsg = f"Coordinate {self.src_name} has different bounds" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds(**args) def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_ignore_mismatch( self, + mocker, ): self.m_array_equal.side_effect = (True, False) args = self._populate( @@ -2740,14 +2774,14 @@ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_ignore_m points, bounds = self.resolve._prepare_points_and_bounds( **args, ignore_mismatch=True ) - self.assertEqual(self.src_points, points) - self.assertIsNone(bounds) - self.assertEqual(2, self.m_array_equal.call_count) + assert points == self.src_points + assert bounds is None + assert self.m_array_equal.call_count == 2 expected = [ - mock.call(self.src_points, self.src_points, withnans=True), - mock.call(self.src_bounds, self.tgt_bounds, withnans=True), + mocker.call(self.src_points, self.src_points, withnans=True), + mocker.call(self.src_bounds, self.tgt_bounds, withnans=True), ] - self.assertEqual(expected, self.m_array_equal.call_args_list) + assert self.m_array_equal.call_args_list == expected def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_strict( self, @@ -2761,14 +2795,14 @@ def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_strict( ) with LENIENT.context(maths=False): emsg = f"Coordinate {self.src_name} has different bounds" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds(**args) def test_coord_ndim_and_shape_equal__points_different(self): self.m_array_equal.side_effect = (False,) args = self._populate(self.src_points, self.tgt_points) emsg = f"Coordinate {self.src_name} has different points" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds(**args) def test_coord_ndim_and_shape_equal__points_different_ignore_mismatch( @@ -2779,30 +2813,31 @@ def test_coord_ndim_and_shape_equal__points_different_ignore_mismatch( points, bounds = self.resolve._prepare_points_and_bounds( **args, ignore_mismatch=True ) - self.assertIsNone(points) - self.assertIsNone(bounds) + assert points is None + assert bounds is None def test_coord_ndim_and_shape_equal__points_different_strict(self): self.m_array_equal.side_effect = (False,) args = self._populate(self.src_points, self.tgt_points) with LENIENT.context(maths=False): emsg = f"Coordinate {self.src_name} has different points" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve._prepare_points_and_bounds(**args) -class Test__create_prepared_item(tests.IrisTest): - def setUp(self): +class Test__create_prepared_item: + 
@pytest.fixture(autouse=True) + def _setup(self, mocker): Coord = namedtuple("Coord", ["points", "bounds"]) - self.points_value = sentinel.points - self.points = mock.Mock(copy=mock.Mock(return_value=self.points_value)) - self.bounds_value = sentinel.bounds - self.bounds = mock.Mock(copy=mock.Mock(return_value=self.bounds_value)) + self.points_value = mocker.sentinel.points + self.points = mocker.Mock(copy=mocker.Mock(return_value=self.points_value)) + self.bounds_value = mocker.sentinel.bounds + self.bounds = mocker.Mock(copy=mocker.Mock(return_value=self.bounds_value)) self.coord = Coord(points=self.points, bounds=self.bounds) self.container = type(self.coord) - self.combined = sentinel.combined - self.src = mock.Mock(combine=mock.Mock(return_value=self.combined)) - self.tgt = sentinel.tgt + self.combined = mocker.sentinel.combined + self.src = mocker.Mock(combine=mocker.Mock(return_value=self.combined)) + self.tgt = mocker.sentinel.tgt def _check(self, src=None, tgt=None): dims = 0 @@ -2813,18 +2848,18 @@ def _check(self, src=None, tgt=None): result = Resolve._create_prepared_item( self.coord, dims, src_metadata=src, tgt_metadata=tgt ) - self.assertIsInstance(result, _PreparedItem) - self.assertIsInstance(result.metadata, _PreparedMetadata) + assert isinstance(result, _PreparedItem) + assert isinstance(result.metadata, _PreparedMetadata) expected = _PreparedMetadata(combined=combined, src=src, tgt=tgt) - self.assertEqual(expected, result.metadata) - self.assertEqual(self.points_value, result.points) - self.assertEqual(1, self.points.copy.call_count) - self.assertEqual([mock.call()], self.points.copy.call_args_list) - self.assertEqual(self.bounds_value, result.bounds) - self.assertEqual(1, self.bounds.copy.call_count) - self.assertEqual([mock.call()], self.bounds.copy.call_args_list) - self.assertEqual((dims,), result.dims) - self.assertEqual(self.container, result.container) + assert result.metadata == expected + assert result.points == self.points_value + assert self.points.copy.call_count == 1 + assert self.points.copy.call_args_list == [mock.call()] + assert result.bounds == self.bounds_value + assert self.bounds.copy.call_count == 1 + assert self.bounds.copy.call_args_list == [mock.call()] + assert result.dims == (dims,) + assert result.container == self.container def test__no_metadata(self): self._check() @@ -2839,8 +2874,9 @@ def test__combine_metadata(self): self._check(src=self.src, tgt=self.tgt) -class Test__prepare_local_payload_dim(tests.IrisTest): - def setUp(self): +class Test__prepare_local_payload_dim: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.Cube = namedtuple("Cube", ["ndim"]) self.resolve = Resolve() self.resolve.prepared_category = _CategoryItems( @@ -2856,8 +2892,8 @@ def setUp(self): dims_free=None, ) self.tgt_coverage = deepcopy(self.src_coverage) - self.prepared_item = sentinel.prepared_item - self.m_create_prepared_item = self.patch( + self.prepared_item = mocker.sentinel.prepared_item + self.m_create_prepared_item = mocker.patch( "iris.common.resolve.Resolve._create_prepared_item", return_value=self.prepared_item, ) @@ -2880,7 +2916,7 @@ def test_src_no_local_with_tgt_no_local(self): self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_coverage = _DimCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 0 def test_src_no_local_with_tgt_no_local__strict(self): # key: (state) 
c=common, f=free, l=local @@ -2901,7 +2937,7 @@ def test_src_no_local_with_tgt_no_local__strict(self): tgt_coverage = _DimCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 0 def test_src_local_with_tgt_local(self): # key: (state) c=common, f=free, l=local @@ -2923,7 +2959,7 @@ def test_src_local_with_tgt_local(self): self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_coverage = _DimCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 0 def test_src_local_with_tgt_local__strict(self): # key: (state) c=common, f=free, l=local @@ -2946,9 +2982,9 @@ def test_src_local_with_tgt_local__strict(self): tgt_coverage = _DimCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 0 - def test_src_local_with_tgt_free(self): + def test_src_local_with_tgt_free(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -2964,23 +3000,21 @@ def test_src_local_with_tgt_free(self): self.resolve.mapping = mapping src_dim = 1 self.src_coverage["dims_local"] = (src_dim,) - src_metadata = sentinel.src_metadata + src_metadata = mocker.sentinel.src_metadata self.src_coverage["metadata"] = [None, src_metadata] - src_coord = sentinel.src_coord + src_coord = mocker.sentinel.src_coord self.src_coverage["coords"] = [None, src_coord] src_coverage = _DimCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_coverage = _DimCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(src_coord, mapping[src_dim], src_metadata=src_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert len(self.resolve.prepared_category.items_dim) == 1 + assert self.resolve.prepared_category.items_dim[0] == self.prepared_item + assert self.m_create_prepared_item.call_count == 1 + expected = [mocker.call(src_coord, mapping[src_dim], src_metadata=src_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_free__strict(self): + def test_src_local_with_tgt_free__strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -2996,18 +3030,18 @@ def test_src_local_with_tgt_free__strict(self): self.resolve.mapping = mapping src_dim = 1 self.src_coverage["dims_local"] = (src_dim,) - src_metadata = sentinel.src_metadata + src_metadata = mocker.sentinel.src_metadata self.src_coverage["metadata"] = [None, src_metadata] - src_coord = sentinel.src_coord + src_coord = mocker.sentinel.src_coord self.src_coverage["coords"] = [None, src_coord] src_coverage = _DimCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_coverage = _DimCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): 
self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 0 - def test_src_free_with_tgt_local(self): + def test_src_free_with_tgt_local(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3025,21 +3059,19 @@ def test_src_free_with_tgt_local(self): self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_dim = 1 self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata + tgt_metadata = mocker.sentinel.tgt_metadata self.tgt_coverage["metadata"] = [None, tgt_metadata] - tgt_coord = sentinel.tgt_coord + tgt_coord = mocker.sentinel.tgt_coord self.tgt_coverage["coords"] = [None, tgt_coord] tgt_coverage = _DimCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert len(self.resolve.prepared_category.items_dim) == 1 + assert self.prepared_item == self.resolve.prepared_category.items_dim[0] + assert self.m_create_prepared_item.call_count == 1 + expected = [mocker.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_free_with_tgt_local__strict(self): + def test_src_free_with_tgt_local__strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3057,16 +3089,16 @@ def test_src_free_with_tgt_local__strict(self): self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_dim = 1 self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata + tgt_metadata = mocker.sentinel.tgt_metadata self.tgt_coverage["metadata"] = [None, tgt_metadata] - tgt_coord = sentinel.tgt_coord + tgt_coord = mocker.sentinel.tgt_coord self.tgt_coverage["coords"] = [None, tgt_coord] tgt_coverage = _DimCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) + assert len(self.resolve.prepared_category.items_dim) == 0 - def test_src_no_local_with_tgt_local__extra_dims(self): + def test_src_no_local_with_tgt_local__extra_dims(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3084,21 +3116,19 @@ def test_src_no_local_with_tgt_local__extra_dims(self): self.tgt_coverage["cube"] = self.Cube(ndim=3) tgt_dim = 0 self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata + tgt_metadata = mocker.sentinel.tgt_metadata self.tgt_coverage["metadata"] = [tgt_metadata, None, None] - tgt_coord = sentinel.tgt_coord + tgt_coord = mocker.sentinel.tgt_coord self.tgt_coverage["coords"] = [tgt_coord, None, None] tgt_coverage = _DimCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, 
self.m_create_prepared_item.call_args_list) + assert len(self.resolve.prepared_category.items_dim) == 1 + assert self.resolve.prepared_category.items_dim[0] == self.prepared_item + assert self.m_create_prepared_item.call_count == 1 + expected = [mocker.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_no_local_with_tgt_local__extra_dims_strict(self): + def test_src_no_local_with_tgt_local__extra_dims_strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3116,24 +3146,23 @@ def test_src_no_local_with_tgt_local__extra_dims_strict(self): self.tgt_coverage["cube"] = self.Cube(ndim=3) tgt_dim = 0 self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata + tgt_metadata = mocker.sentinel.tgt_metadata self.tgt_coverage["metadata"] = [tgt_metadata, None, None] - tgt_coord = sentinel.tgt_coord + tgt_coord = mocker.sentinel.tgt_coord self.tgt_coverage["coords"] = [tgt_coord, None, None] tgt_coverage = _DimCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert len(self.resolve.prepared_category.items_dim) == 1 + assert self.resolve.prepared_category.items_dim[0] == self.prepared_item + assert self.m_create_prepared_item.call_count == 1 + expected = [mocker.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected -class Test__prepare_local_payload_aux(tests.IrisTest): - def setUp(self): +class Test__prepare_local_payload_aux: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.Cube = namedtuple("Cube", ["ndim"]) self.resolve = Resolve() self.resolve.prepared_category = _CategoryItems( @@ -3151,9 +3180,9 @@ def setUp(self): dims_free=None, ) self.tgt_coverage = deepcopy(self.src_coverage) - self.src_prepared_item = sentinel.src_prepared_item - self.tgt_prepared_item = sentinel.tgt_prepared_item - self.m_create_prepared_item = self.patch( + self.src_prepared_item = mocker.sentinel.src_prepared_item + self.tgt_prepared_item = mocker.sentinel.tgt_prepared_item + self.m_create_prepared_item = mocker.patch( "iris.common.resolve.Resolve._create_prepared_item", side_effect=(self.src_prepared_item, self.tgt_prepared_item), ) @@ -3176,7 +3205,7 @@ def test_src_no_local_with_tgt_no_local(self): self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 0 def test_src_no_local_with_tgt_no_local__strict(self): # key: (state) c=common, f=free, l=local @@ -3197,9 +3226,9 @@ def test_src_no_local_with_tgt_no_local__strict(self): tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 0 - def 
test_src_local_with_tgt_local(self): + def test_src_local_with_tgt_local(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3213,30 +3242,30 @@ def test_src_local_with_tgt_local(self): # 0->0, 1->1 mapping = {0: 0, 1: 1} self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_dims = (1,) src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) self.src_coverage["local_items_aux"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_dims = (1,) tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) self.tgt_coverage["local_items_aux"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 2 expected = [self.src_prepared_item, self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) + assert self.resolve.prepared_category.items_aux == expected expected = [ - mock.call(src_coord, tgt_dims, src_metadata=src_metadata), - mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata), + mocker.call(src_coord, tgt_dims, src_metadata=src_metadata), + mocker.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata), ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_local__strict(self): + def test_src_local_with_tgt_local__strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3250,24 +3279,24 @@ def test_src_local_with_tgt_local__strict(self): # 0->0, 1->1 mapping = {0: 0, 1: 1} self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_dims = (1,) src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) self.src_coverage["local_items_aux"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_dims = (1,) tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) self.tgt_coverage["local_items_aux"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 0 - def test_src_local_with_tgt_free(self): + def test_src_local_with_tgt_free(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3281,8 +3310,8 @@ def test_src_local_with_tgt_free(self): # 0->0, 1->1 mapping = {0: 0, 1: 1} self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_dims = (1,) 
src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) self.src_coverage["local_items_aux"].append(src_item) @@ -3291,13 +3320,13 @@ def test_src_local_with_tgt_free(self): self.tgt_coverage["cube"] = self.Cube(ndim=2) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 1 expected = [self.src_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(src_coord, src_dims, src_metadata=src_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_aux == expected + expected = [mocker.call(src_coord, src_dims, src_metadata=src_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_free__strict(self): + def test_src_local_with_tgt_free__strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3311,8 +3340,8 @@ def test_src_local_with_tgt_free__strict(self): # 0->0, 1->1 mapping = {0: 0, 1: 1} self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_dims = (1,) src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) self.src_coverage["local_items_aux"].append(src_item) @@ -3322,9 +3351,9 @@ def test_src_local_with_tgt_free__strict(self): tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 0 - def test_src_free_with_tgt_local(self): + def test_src_free_with_tgt_local(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3341,21 +3370,21 @@ def test_src_free_with_tgt_local(self): self.resolve.mapping = mapping src_coverage = _AuxCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_dims = (1,) tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) self.tgt_coverage["local_items_aux"].append(tgt_item) self.tgt_coverage["dims_local"].extend(tgt_dims) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 1 expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_aux == expected + expected = [mocker.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_free_with_tgt_local__strict(self): + def test_src_free_with_tgt_local__strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3372,8 +3401,8 @@ def test_src_free_with_tgt_local__strict(self): self.resolve.mapping = mapping 
src_coverage = _AuxCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_dims = (1,) tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) self.tgt_coverage["local_items_aux"].append(tgt_item) @@ -3381,9 +3410,9 @@ def test_src_free_with_tgt_local__strict(self): tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 0 - def test_src_no_local_with_tgt_local__extra_dims(self): + def test_src_no_local_with_tgt_local__extra_dims(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3400,21 +3429,21 @@ def test_src_no_local_with_tgt_local__extra_dims(self): self.resolve.mapping = mapping src_coverage = _AuxCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=3) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_dims = (0,) tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) self.tgt_coverage["local_items_aux"].append(tgt_item) self.tgt_coverage["dims_local"].extend(tgt_dims) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 1 expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_aux == expected + expected = [mocker.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_no_local_with_tgt_local__extra_dims_strict(self): + def test_src_no_local_with_tgt_local__extra_dims_strict(self, mocker): # key: (state) c=common, f=free, l=local # (coord) d=dim # @@ -3431,8 +3460,8 @@ def test_src_no_local_with_tgt_local__extra_dims_strict(self): self.resolve.mapping = mapping src_coverage = _AuxCoverage(**self.src_coverage) self.tgt_coverage["cube"] = self.Cube(ndim=3) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_dims = (0,) tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) self.tgt_coverage["local_items_aux"].append(tgt_item) @@ -3440,15 +3469,16 @@ def test_src_no_local_with_tgt_local__extra_dims_strict(self): tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=True): self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) + assert len(self.resolve.prepared_category.items_aux) == 1 expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert 
self.resolve.prepared_category.items_aux == expected + expected = [mocker.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected -class Test__prepare_local_payload_scalar(tests.IrisTest): - def setUp(self): +class Test__prepare_local_payload_scalar: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.Cube = namedtuple("Cube", ["ndim"]) self.resolve = Resolve() self.resolve.prepared_category = _CategoryItems( @@ -3465,9 +3495,9 @@ def setUp(self): dims_free=None, ) self.tgt_coverage = deepcopy(self.src_coverage) - self.src_prepared_item = sentinel.src_prepared_item - self.tgt_prepared_item = sentinel.tgt_prepared_item - self.m_create_prepared_item = self.patch( + self.src_prepared_item = mocker.sentinel.src_prepared_item + self.tgt_prepared_item = mocker.sentinel.tgt_prepared_item + self.m_create_prepared_item = mocker.patch( "iris.common.resolve.Resolve._create_prepared_item", side_effect=(self.src_prepared_item, self.tgt_prepared_item), ) @@ -3480,7 +3510,7 @@ def test_src_no_local_with_tgt_no_local(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 def test_src_no_local_with_tgt_no_local__strict(self): ndim = 2 @@ -3489,7 +3519,7 @@ def test_src_no_local_with_tgt_no_local__strict(self): tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 def test_src_no_local_with_tgt_no_local__src_scalar_cube(self): ndim = 0 @@ -3497,7 +3527,7 @@ def test_src_no_local_with_tgt_no_local__src_scalar_cube(self): src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 def test_src_no_local_with_tgt_no_local__src_scalar_cube_strict(self): ndim = 0 @@ -3506,223 +3536,223 @@ def test_src_no_local_with_tgt_no_local__src_scalar_cube_strict(self): tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 - def test_src_local_with_tgt_no_local(self): + def test_src_local_with_tgt_no_local(self, mocker): ndim = 2 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 1 expected = [self.src_prepared_item] 
- self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [mock.call(src_coord, self.src_dims, src_metadata=src_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_scalar == expected + expected = [mocker.call(src_coord, self.src_dims, src_metadata=src_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_no_local__strict(self): + def test_src_local_with_tgt_no_local__strict(self, mocker): ndim = 2 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 - def test_src_local_with_tgt_no_local__src_scalar_cube(self): + def test_src_local_with_tgt_no_local__src_scalar_cube(self, mocker): ndim = 0 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 1 expected = [self.src_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [mock.call(src_coord, self.src_dims, src_metadata=src_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_scalar == expected + expected = [mocker.call(src_coord, self.src_dims, src_metadata=src_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self): + def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self, mocker): ndim = 0 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 - def test_src_no_local_with_tgt_local(self): + def test_src_no_local_with_tgt_local(self, mocker): self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) ndim = 2 self.src_coverage["cube"] = 
self.Cube(ndim=ndim) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 1 expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_scalar == expected + expected = [mocker.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_no_local_with_tgt_local__strict(self): + def test_src_no_local_with_tgt_local__strict(self, mocker): self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) ndim = 2 self.src_coverage["cube"] = self.Cube(ndim=ndim) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 - def test_src_no_local_with_tgt_local__src_scalar_cube(self): + def test_src_no_local_with_tgt_local__src_scalar_cube(self, mocker): self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) ndim = 0 self.src_coverage["cube"] = self.Cube(ndim=ndim) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 1 expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_scalar == expected + expected = [mocker.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self): + def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self, mocker): self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) ndim = 0 self.src_coverage["cube"] = 
self.Cube(ndim=ndim) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 1 expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.resolve.prepared_category.items_scalar == expected + expected = [mocker.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata)] + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_local(self): + def test_src_local_with_tgt_local(self, mocker): ndim = 2 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 2 expected = [self.src_prepared_item, self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + assert self.resolve.prepared_category.items_scalar == expected expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata), - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), + mocker.call(src_coord, self.src_dims, src_metadata=src_metadata), + mocker.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_local__strict(self): + def test_src_local_with_tgt_local__strict(self, mocker): ndim = 2 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) 
self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 - def test_src_local_with_tgt_local__src_scalar_cube(self): + def test_src_local_with_tgt_local__src_scalar_cube(self, mocker): ndim = 0 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 2 expected = [self.src_prepared_item, self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) + assert self.resolve.prepared_category.items_scalar == expected expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata), - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), + mocker.call(src_coord, self.src_dims, src_metadata=src_metadata), + mocker.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_src_local_with_tgt_local__src_scalar_cube_strict(self): + def test_src_local_with_tgt_local__src_scalar_cube_strict(self, mocker): ndim = 0 self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord + src_metadata = mocker.sentinel.src_metadata + src_coord = mocker.sentinel.src_coord src_item = _Item(metadata=src_metadata, coord=src_coord, dims=self.src_dims) self.src_coverage["local_items_scalar"].append(src_item) src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord + tgt_metadata = mocker.sentinel.tgt_metadata + tgt_coord = mocker.sentinel.tgt_coord tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims) self.tgt_coverage["local_items_scalar"].append(tgt_item) tgt_coverage = _AuxCoverage(**self.tgt_coverage) with LENIENT.context(maths=False): self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) + assert len(self.resolve.prepared_category.items_scalar) == 0 -class Test__prepare_local_payload(tests.IrisTest): - def test(self): - src_dim_coverage = sentinel.src_dim_coverage - src_aux_coverage = sentinel.src_aux_coverage - tgt_dim_coverage = sentinel.tgt_dim_coverage - tgt_aux_coverage = sentinel.tgt_aux_coverage +class Test__prepare_local_payload: + def test(self, mocker): + src_dim_coverage = mocker.sentinel.src_dim_coverage + 
src_aux_coverage = mocker.sentinel.src_aux_coverage
+        tgt_dim_coverage = mocker.sentinel.tgt_dim_coverage
+        tgt_aux_coverage = mocker.sentinel.tgt_aux_coverage
         root = "iris.common.resolve.Resolve"
-        m_prepare_dim = self.patch(f"{root}._prepare_local_payload_dim")
-        m_prepare_aux = self.patch(f"{root}._prepare_local_payload_aux")
-        m_prepare_scalar = self.patch(f"{root}._prepare_local_payload_scalar")
+        m_prepare_dim = mocker.patch(f"{root}._prepare_local_payload_dim")
+        m_prepare_aux = mocker.patch(f"{root}._prepare_local_payload_aux")
+        m_prepare_scalar = mocker.patch(f"{root}._prepare_local_payload_scalar")
         resolve = Resolve()
         resolve._prepare_local_payload(
             src_dim_coverage,
@@ -3730,55 +3760,58 @@ def test(self):
             tgt_dim_coverage,
             tgt_aux_coverage,
         )
-        self.assertEqual(1, m_prepare_dim.call_count)
-        expected = [mock.call(src_dim_coverage, tgt_dim_coverage)]
-        self.assertEqual(expected, m_prepare_dim.call_args_list)
-        self.assertEqual(1, m_prepare_aux.call_count)
-        expected = [mock.call(src_aux_coverage, tgt_aux_coverage)]
-        self.assertEqual(expected, m_prepare_aux.call_args_list)
-        self.assertEqual(1, m_prepare_scalar.call_count)
-        expected = [mock.call(src_aux_coverage, tgt_aux_coverage)]
-        self.assertEqual(expected, m_prepare_scalar.call_args_list)
-
-
-class Test__metadata_prepare(tests.IrisTest):
-    def setUp(self):
-        self.src_cube = sentinel.src_cube
-        self.src_category_local = sentinel.src_category_local
-        self.src_dim_coverage = sentinel.src_dim_coverage
-        self.src_aux_coverage = mock.Mock(
-            common_items_aux=sentinel.src_aux_coverage_common_items_aux,
-            common_items_scalar=sentinel.src_aux_coverage_common_items_scalar,
-        )
-        self.tgt_cube = sentinel.tgt_cube
-        self.tgt_category_local = sentinel.tgt_category_local
-        self.tgt_dim_coverage = sentinel.tgt_dim_coverage
-        self.tgt_aux_coverage = mock.Mock(
-            common_items_aux=sentinel.tgt_aux_coverage_common_items_aux,
-            common_items_scalar=sentinel.tgt_aux_coverage_common_items_scalar,
+        assert m_prepare_dim.call_count == 1
+        expected = [mocker.call(src_dim_coverage, tgt_dim_coverage)]
+        assert m_prepare_dim.call_args_list == expected
+        assert m_prepare_aux.call_count == 1
+        expected = [mocker.call(src_aux_coverage, tgt_aux_coverage)]
+        assert m_prepare_aux.call_args_list == expected
+        assert m_prepare_scalar.call_count == 1
+        expected = [mocker.call(src_aux_coverage, tgt_aux_coverage)]
+        assert m_prepare_scalar.call_args_list == expected
+
+
+class Test__metadata_prepare:
+    @pytest.fixture(autouse=True)
+    def _setup(self, mocker):
+        self.src_cube = mocker.sentinel.src_cube
+        self.src_category_local = mocker.sentinel.src_category_local
+        self.src_dim_coverage = mocker.sentinel.src_dim_coverage
+        self.src_aux_coverage = mocker.Mock(
+            common_items_aux=mocker.sentinel.src_aux_coverage_common_items_aux,
+            common_items_scalar=mocker.sentinel.src_aux_coverage_common_items_scalar,
+        )
+        self.tgt_cube = mocker.sentinel.tgt_cube
+        self.tgt_category_local = mocker.sentinel.tgt_category_local
+        self.tgt_dim_coverage = mocker.sentinel.tgt_dim_coverage
+        self.tgt_aux_coverage = mocker.Mock(
+            common_items_aux=mocker.sentinel.tgt_aux_coverage_common_items_aux,
+            common_items_scalar=mocker.sentinel.tgt_aux_coverage_common_items_scalar,
         )
         self.resolve = Resolve()
         root = "iris.common.resolve.Resolve"
-        self.m_prepare_common_dim_payload = self.patch(
+        self.m_prepare_common_dim_payload = mocker.patch(
             f"{root}._prepare_common_dim_payload"
         )
-        self.m_prepare_common_aux_payload = self.patch(
+        self.m_prepare_common_aux_payload = mocker.patch(
f"{root}._prepare_common_aux_payload" ) - self.m_prepare_local_payload = self.patch(f"{root}._prepare_local_payload") - self.m_prepare_factory_payload = self.patch(f"{root}._prepare_factory_payload") + self.m_prepare_local_payload = mocker.patch(f"{root}._prepare_local_payload") + self.m_prepare_factory_payload = mocker.patch( + f"{root}._prepare_factory_payload" + ) def _check(self): - self.assertIsNone(self.resolve.prepared_category) - self.assertIsNone(self.resolve.prepared_factories) + assert self.resolve.prepared_category is None + assert self.resolve.prepared_factories is None self.resolve._metadata_prepare() expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - self.assertEqual(expected, self.resolve.prepared_category) - self.assertEqual([], self.resolve.prepared_factories) - self.assertEqual(1, self.m_prepare_common_dim_payload.call_count) + assert self.resolve.prepared_category == expected + assert self.resolve.prepared_factories == [] + assert self.m_prepare_common_dim_payload.call_count == 1 expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_prepare_common_dim_payload.call_args_list) - self.assertEqual(2, self.m_prepare_common_aux_payload.call_count) + assert self.m_prepare_common_dim_payload.call_args_list == expected + assert self.m_prepare_common_aux_payload.call_count == 2 expected = [ mock.call( self.src_aux_coverage.common_items_aux, @@ -3792,8 +3825,8 @@ def _check(self): ignore_mismatch=True, ), ] - self.assertEqual(expected, self.m_prepare_common_aux_payload.call_args_list) - self.assertEqual(1, self.m_prepare_local_payload.call_count) + assert self.m_prepare_common_aux_payload.call_args_list == expected + assert self.m_prepare_local_payload.call_count == 1 expected = [ mock.call( self.src_dim_coverage, @@ -3802,13 +3835,13 @@ def _check(self): self.tgt_aux_coverage, ) ] - self.assertEqual(expected, self.m_prepare_local_payload.call_args_list) - self.assertEqual(2, self.m_prepare_factory_payload.call_count) + assert self.m_prepare_local_payload.call_args_list == expected + assert self.m_prepare_factory_payload.call_count == 2 expected = [ mock.call(self.tgt_cube, self.tgt_category_local, from_src=False), mock.call(self.src_cube, self.src_category_local), ] - self.assertEqual(expected, self.m_prepare_factory_payload.call_args_list) + assert self.m_prepare_factory_payload.call_args_list == expected def test_map_rhs_to_lhs__true(self): self.resolve.map_rhs_to_lhs = True @@ -3835,8 +3868,9 @@ def test_map_rhs_to_lhs__false(self): self._check() -class Test__prepare_factory_payload(tests.IrisTest): - def setUp(self): +class Test__prepare_factory_payload: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.Cube = namedtuple("Cube", ["aux_factories"]) self.Coord = namedtuple("Coord", ["metadata"]) self.Factory_T1 = namedtuple( @@ -3850,16 +3884,16 @@ def setUp(self): self.resolve = Resolve() self.resolve.map_rhs_to_lhs = True self.resolve.prepared_factories = [] - self.m_get_prepared_item = self.patch( + self.m_get_prepared_item = mocker.patch( "iris.common.resolve.Resolve._get_prepared_item" ) - self.category_local = sentinel.category_local - self.from_src = sentinel.from_src + self.category_local = mocker.sentinel.category_local + self.from_src = mocker.sentinel.from_src def test_no_factory(self): cube = self.Cube(aux_factories=[]) self.resolve._prepare_factory_payload(cube, self.category_local) - self.assertEqual(0, len(self.resolve.prepared_factories)) + assert 
len(self.resolve.prepared_factories) == 0 def test_skip_factory__already_prepared(self): aux_factory = self.Factory_T1(dependencies=None) @@ -3871,12 +3905,12 @@ def test_skip_factory__already_prepared(self): ] self.resolve.prepared_factories.extend(prepared_factories) self.resolve._prepare_factory_payload(cube, self.category_local) - self.assertEqual(prepared_factories, self.resolve.prepared_factories) + assert self.resolve.prepared_factories == prepared_factories - def test_factory__dependency_already_prepared(self): - coord_a = self.Coord(metadata=sentinel.coord_a_metadata) - coord_b = self.Coord(metadata=sentinel.coord_b_metadata) - coord_c = self.Coord(metadata=sentinel.coord_c_metadata) + def test_factory__dependency_already_prepared(self, mocker): + coord_a = self.Coord(metadata=mocker.sentinel.coord_a_metadata) + coord_b = self.Coord(metadata=mocker.sentinel.coord_b_metadata) + coord_c = self.Coord(metadata=mocker.sentinel.coord_c_metadata) side_effect = (coord_a, coord_b, coord_c) self.m_get_prepared_item.side_effect = side_effect dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) @@ -3886,7 +3920,7 @@ def test_factory__dependency_already_prepared(self): self.resolve._prepare_factory_payload( cube, self.category_local, from_src=self.from_src ) - self.assertEqual(1, len(self.resolve.prepared_factories)) + assert len(self.resolve.prepared_factories) == 1 prepared_dependencies = { name: coord.metadata for name, coord in dependencies.items() } @@ -3895,21 +3929,21 @@ def test_factory__dependency_already_prepared(self): container=self.container_T1, dependencies=prepared_dependencies ) ] - self.assertEqual(expected, self.resolve.prepared_factories) - self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) + assert self.resolve.prepared_factories == expected + assert self.m_get_prepared_item.call_count == len(side_effect) expected = [ - mock.call(coord_a.metadata, self.category_local, from_src=self.from_src), - mock.call(coord_b.metadata, self.category_local, from_src=self.from_src), - mock.call(coord_c.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_a.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_b.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_c.metadata, self.category_local, from_src=self.from_src), ] actual = self.m_get_prepared_item.call_args_list for call in expected: - self.assertIn(call, actual) + assert call in actual - def test_factory__dependency_local_not_prepared(self): - coord_a = self.Coord(metadata=sentinel.coord_a_metadata) - coord_b = self.Coord(metadata=sentinel.coord_b_metadata) - coord_c = self.Coord(metadata=sentinel.coord_c_metadata) + def test_factory__dependency_local_not_prepared(self, mocker): + coord_a = self.Coord(metadata=mocker.sentinel.coord_a_metadata) + coord_b = self.Coord(metadata=mocker.sentinel.coord_b_metadata) + coord_c = self.Coord(metadata=mocker.sentinel.coord_c_metadata) side_effect = (None, coord_a, None, coord_b, None, coord_c) self.m_get_prepared_item.side_effect = side_effect dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) @@ -3919,7 +3953,7 @@ def test_factory__dependency_local_not_prepared(self): self.resolve._prepare_factory_payload( cube, self.category_local, from_src=self.from_src ) - self.assertEqual(1, len(self.resolve.prepared_factories)) + assert len(self.resolve.prepared_factories) == 1 prepared_dependencies = { name: coord.metadata for name, coord in dependencies.items() } @@ -3928,25 
+3962,25 @@ def test_factory__dependency_local_not_prepared(self): container=self.container_T1, dependencies=prepared_dependencies ) ] - self.assertEqual(expected, self.resolve.prepared_factories) - self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) + assert self.resolve.prepared_factories == expected + assert self.m_get_prepared_item.call_count == len(side_effect) expected = [ - mock.call(coord_a.metadata, self.category_local, from_src=self.from_src), - mock.call(coord_b.metadata, self.category_local, from_src=self.from_src), - mock.call(coord_c.metadata, self.category_local, from_src=self.from_src), - mock.call( + mocker.call(coord_a.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_b.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_c.metadata, self.category_local, from_src=self.from_src), + mocker.call( coord_a.metadata, self.category_local, from_src=self.from_src, from_local=True, ), - mock.call( + mocker.call( coord_b.metadata, self.category_local, from_src=self.from_src, from_local=True, ), - mock.call( + mocker.call( coord_c.metadata, self.category_local, from_src=self.from_src, @@ -3955,12 +3989,12 @@ def test_factory__dependency_local_not_prepared(self): ] actual = self.m_get_prepared_item.call_args_list for call in expected: - self.assertIn(call, actual) + assert call in actual - def test_factory__dependency_not_found(self): - coord_a = self.Coord(metadata=sentinel.coord_a_metadata) - coord_b = self.Coord(metadata=sentinel.coord_b_metadata) - coord_c = self.Coord(metadata=sentinel.coord_c_metadata) + def test_factory__dependency_not_found(self, mocker): + coord_a = self.Coord(metadata=mocker.sentinel.coord_a_metadata) + coord_b = self.Coord(metadata=mocker.sentinel.coord_b_metadata) + coord_c = self.Coord(metadata=mocker.sentinel.coord_c_metadata) side_effect = (None, None) self.m_get_prepared_item.side_effect = side_effect dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) @@ -3970,25 +4004,25 @@ def test_factory__dependency_not_found(self): self.resolve._prepare_factory_payload( cube, self.category_local, from_src=self.from_src ) - self.assertEqual(0, len(self.resolve.prepared_factories)) - self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) + assert len(self.resolve.prepared_factories) == 0 + assert self.m_get_prepared_item.call_count == len(side_effect) expected = [ - mock.call(coord_a.metadata, self.category_local, from_src=self.from_src), - mock.call(coord_b.metadata, self.category_local, from_src=self.from_src), - mock.call(coord_c.metadata, self.category_local, from_src=self.from_src), - mock.call( + mocker.call(coord_a.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_b.metadata, self.category_local, from_src=self.from_src), + mocker.call(coord_c.metadata, self.category_local, from_src=self.from_src), + mocker.call( coord_a.metadata, self.category_local, from_src=self.from_src, from_local=True, ), - mock.call( + mocker.call( coord_b.metadata, self.category_local, from_src=self.from_src, from_local=True, ), - mock.call( + mocker.call( coord_c.metadata, self.category_local, from_src=self.from_src, @@ -3997,15 +4031,16 @@ def test_factory__dependency_not_found(self): ] actual = self.m_get_prepared_item.call_args_list for call in actual: - self.assertIn(call, expected) + assert call in expected -class Test__get_prepared_item(tests.IrisTest): - def setUp(self): +class Test__get_prepared_item: + @pytest.fixture(autouse=True) + def 
_setup(self, mocker): PreparedItem = namedtuple("PreparedItem", ["metadata"]) self.resolve = Resolve() - self.prepared_dim_metadata_src = sentinel.prepared_dim_metadata_src - self.prepared_dim_metadata_tgt = sentinel.prepared_dim_metadata_tgt + self.prepared_dim_metadata_src = mocker.sentinel.prepared_dim_metadata_src + self.prepared_dim_metadata_tgt = mocker.sentinel.prepared_dim_metadata_tgt self.prepared_items_dim = PreparedItem( metadata=_PreparedMetadata( combined=None, @@ -4013,8 +4048,8 @@ def setUp(self): tgt=self.prepared_dim_metadata_tgt, ) ) - self.prepared_aux_metadata_src = sentinel.prepared_aux_metadata_src - self.prepared_aux_metadata_tgt = sentinel.prepared_aux_metadata_tgt + self.prepared_aux_metadata_src = mocker.sentinel.prepared_aux_metadata_src + self.prepared_aux_metadata_tgt = mocker.sentinel.prepared_aux_metadata_tgt self.prepared_items_aux = PreparedItem( metadata=_PreparedMetadata( combined=None, @@ -4022,8 +4057,8 @@ def setUp(self): tgt=self.prepared_aux_metadata_tgt, ) ) - self.prepared_scalar_metadata_src = sentinel.prepared_scalar_metadata_src - self.prepared_scalar_metadata_tgt = sentinel.prepared_scalar_metadata_tgt + self.prepared_scalar_metadata_src = mocker.sentinel.prepared_scalar_metadata_src + self.prepared_scalar_metadata_tgt = mocker.sentinel.prepared_scalar_metadata_tgt self.prepared_items_scalar = PreparedItem( metadata=_PreparedMetadata( combined=None, @@ -4037,13 +4072,13 @@ def setUp(self): items_scalar=[self.prepared_items_scalar], ) self.resolve.mapping = {0: 10} - self.m_create_prepared_item = self.patch( + self.m_create_prepared_item = mocker.patch( "iris.common.resolve.Resolve._create_prepared_item" ) - self.local_dim_metadata = sentinel.local_dim_metadata - self.local_aux_metadata = sentinel.local_aux_metadata - self.local_scalar_metadata = sentinel.local_scalar_metadata - self.local_coord = sentinel.local_coord + self.local_dim_metadata = mocker.sentinel.local_dim_metadata + self.local_aux_metadata = mocker.sentinel.local_aux_metadata + self.local_scalar_metadata = mocker.sentinel.local_scalar_metadata + self.local_coord = mocker.sentinel.local_coord self.local_coord_dims = (0,) self.local_items_dim = _Item( metadata=self.local_dim_metadata, @@ -4066,25 +4101,25 @@ def setUp(self): items_scalar=[self.local_items_scalar], ) - def test_missing_prepared_coord__from_src(self): - metadata = sentinel.missing + def test_missing_prepared_coord__from_src(self, mocker): + metadata = mocker.sentinel.missing category_local = None result = self.resolve._get_prepared_item(metadata, category_local) - self.assertIsNone(result) + assert result is None - def test_missing_prepared_coord__from_tgt(self): - metadata = sentinel.missing + def test_missing_prepared_coord__from_tgt(self, mocker): + metadata = mocker.sentinel.missing category_local = None result = self.resolve._get_prepared_item( metadata, category_local, from_src=False ) - self.assertIsNone(result) + assert result is None def test_get_prepared_dim_coord__from_src(self): metadata = self.prepared_dim_metadata_src category_local = None result = self.resolve._get_prepared_item(metadata, category_local) - self.assertEqual(self.prepared_items_dim, result) + assert result == self.prepared_items_dim def test_get_prepared_dim_coord__from_tgt(self): metadata = self.prepared_dim_metadata_tgt @@ -4092,13 +4127,13 @@ def test_get_prepared_dim_coord__from_tgt(self): result = self.resolve._get_prepared_item( metadata, category_local, from_src=False ) - self.assertEqual(self.prepared_items_dim, result) + assert 
result == self.prepared_items_dim def test_get_prepared_aux_coord__from_src(self): metadata = self.prepared_aux_metadata_src category_local = None result = self.resolve._get_prepared_item(metadata, category_local) - self.assertEqual(self.prepared_items_aux, result) + assert result == self.prepared_items_aux def test_get_prepared_aux_coord__from_tgt(self): metadata = self.prepared_aux_metadata_tgt @@ -4106,13 +4141,13 @@ def test_get_prepared_aux_coord__from_tgt(self): result = self.resolve._get_prepared_item( metadata, category_local, from_src=False ) - self.assertEqual(self.prepared_items_aux, result) + assert result == self.prepared_items_aux def test_get_prepared_scalar_coord__from_src(self): metadata = self.prepared_scalar_metadata_src category_local = None result = self.resolve._get_prepared_item(metadata, category_local) - self.assertEqual(self.prepared_items_scalar, result) + assert result == self.prepared_items_scalar def test_get_prepared_scalar_coord__from_tgt(self): metadata = self.prepared_scalar_metadata_tgt @@ -4120,163 +4155,164 @@ def test_get_prepared_scalar_coord__from_tgt(self): result = self.resolve._get_prepared_item( metadata, category_local, from_src=False ) - self.assertEqual(self.prepared_items_scalar, result) + assert result == self.prepared_items_scalar - def test_missing_local_coord__from_src(self): - metadata = sentinel.missing + def test_missing_local_coord__from_src(self, mocker): + metadata = mocker.sentinel.missing result = self.resolve._get_prepared_item( metadata, self.category_local, from_local=True ) - self.assertIsNone(result) + assert result is None - def test_missing_local_coord__from_tgt(self): - metadata = sentinel.missing + def test_missing_local_coord__from_tgt(self, mocker): + metadata = mocker.sentinel.missing result = self.resolve._get_prepared_item( metadata, self.category_local, from_src=False, from_local=True ) - self.assertIsNone(result) + assert result is None - def test_get_local_dim_coord__from_src(self): - created_local_item = sentinel.created_local_item + def test_get_local_dim_coord__from_src(self, mocker): + created_local_item = mocker.sentinel.created_local_item self.m_create_prepared_item.return_value = created_local_item metadata = self.local_dim_metadata result = self.resolve._get_prepared_item( metadata, self.category_local, from_local=True ) expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_dim)) - self.assertEqual(expected, self.resolve.prepared_category.items_dim[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) + assert result == expected + assert len(self.resolve.prepared_category.items_dim) == 2 + assert self.resolve.prepared_category.items_dim[1] == expected + assert self.m_create_prepared_item.call_count == 1 dims = (self.resolve.mapping[self.local_coord_dims[0]],) expected = [ - mock.call( + mocker.call( self.local_coord, dims, src_metadata=metadata, tgt_metadata=None, ) ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_get_local_dim_coord__from_tgt(self): - created_local_item = sentinel.created_local_item + def test_get_local_dim_coord__from_tgt(self, mocker): + created_local_item = mocker.sentinel.created_local_item self.m_create_prepared_item.return_value = created_local_item metadata = self.local_dim_metadata result = self.resolve._get_prepared_item( metadata, self.category_local, from_src=False, from_local=True ) expected 
= created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_dim)) - self.assertEqual(expected, self.resolve.prepared_category.items_dim[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) + assert result == expected + assert len(self.resolve.prepared_category.items_dim) == 2 + assert self.resolve.prepared_category.items_dim[1] == expected + assert self.m_create_prepared_item.call_count == 1 dims = self.local_coord_dims expected = [ - mock.call( + mocker.call( self.local_coord, dims, src_metadata=None, tgt_metadata=metadata, ) ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_get_local_aux_coord__from_src(self): - created_local_item = sentinel.created_local_item + def test_get_local_aux_coord__from_src(self, mocker): + created_local_item = mocker.sentinel.created_local_item self.m_create_prepared_item.return_value = created_local_item metadata = self.local_aux_metadata result = self.resolve._get_prepared_item( metadata, self.category_local, from_local=True ) expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) - self.assertEqual(expected, self.resolve.prepared_category.items_aux[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) + assert result == expected + assert len(self.resolve.prepared_category.items_aux) == 2 + assert self.resolve.prepared_category.items_aux[1] == expected + assert self.m_create_prepared_item.call_count == 1 dims = (self.resolve.mapping[self.local_coord_dims[0]],) expected = [ - mock.call( + mocker.call( self.local_coord, dims, src_metadata=metadata, tgt_metadata=None, ) ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_get_local_aux_coord__from_tgt(self): - created_local_item = sentinel.created_local_item + def test_get_local_aux_coord__from_tgt(self, mocker): + created_local_item = mocker.sentinel.created_local_item self.m_create_prepared_item.return_value = created_local_item metadata = self.local_aux_metadata result = self.resolve._get_prepared_item( metadata, self.category_local, from_src=False, from_local=True ) expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) - self.assertEqual(expected, self.resolve.prepared_category.items_aux[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) + assert result == expected + assert len(self.resolve.prepared_category.items_aux) == 2 + assert self.resolve.prepared_category.items_aux[1] == expected + assert self.m_create_prepared_item.call_count == 1 dims = self.local_coord_dims expected = [ - mock.call( + mocker.call( self.local_coord, dims, src_metadata=None, tgt_metadata=metadata, ) ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_get_local_scalar_coord__from_src(self): - created_local_item = sentinel.created_local_item + def test_get_local_scalar_coord__from_src(self, mocker): + created_local_item = mocker.sentinel.created_local_item self.m_create_prepared_item.return_value = created_local_item metadata = self.local_scalar_metadata result = self.resolve._get_prepared_item( metadata, self.category_local, from_local=True ) expected = created_local_item - 
self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - self.assertEqual(expected, self.resolve.prepared_category.items_scalar[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) + assert result == expected + assert len(self.resolve.prepared_category.items_scalar) == 2 + assert self.resolve.prepared_category.items_scalar[1] == expected + assert self.m_create_prepared_item.call_count == 1 dims = (self.resolve.mapping[self.local_coord_dims[0]],) expected = [ - mock.call( + mocker.call( self.local_coord, dims, src_metadata=metadata, tgt_metadata=None, ) ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected - def test_get_local_scalar_coord__from_tgt(self): - created_local_item = sentinel.created_local_item + def test_get_local_scalar_coord__from_tgt(self, mocker): + created_local_item = mocker.sentinel.created_local_item self.m_create_prepared_item.return_value = created_local_item metadata = self.local_scalar_metadata result = self.resolve._get_prepared_item( metadata, self.category_local, from_src=False, from_local=True ) expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - self.assertEqual(expected, self.resolve.prepared_category.items_scalar[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) + assert result == expected + assert len(self.resolve.prepared_category.items_scalar) == 2 + assert self.resolve.prepared_category.items_scalar[1] == expected + assert self.m_create_prepared_item.call_count == 1 dims = self.local_coord_dims expected = [ - mock.call( + mocker.call( self.local_coord, dims, src_metadata=None, tgt_metadata=metadata, ) ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) + assert self.m_create_prepared_item.call_args_list == expected -class Test_cube(tests.IrisTest): - def setUp(self): +class Test_cube: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.shape = (2, 3) self.data = np.zeros(np.multiply(*self.shape), dtype=np.int8).reshape( self.shape @@ -4299,25 +4335,25 @@ def setUp(self): rhs_cube = Cube(self.data) rhs_cube.metadata = self.cube_metadata self.resolve.rhs_cube = rhs_cube - self.m_add_dim_coord = self.patch("iris.cube.Cube.add_dim_coord") - self.m_add_aux_coord = self.patch("iris.cube.Cube.add_aux_coord") - self.m_add_aux_factory = self.patch("iris.cube.Cube.add_aux_factory") - self.m_coord = self.patch("iris.cube.Cube.coord") + self.m_add_dim_coord = mocker.patch("iris.cube.Cube.add_dim_coord") + self.m_add_aux_coord = mocker.patch("iris.cube.Cube.add_aux_coord") + self.m_add_aux_factory = mocker.patch("iris.cube.Cube.add_aux_factory") + self.m_coord = mocker.patch("iris.cube.Cube.coord") # # prepared coordinates # prepared_category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) # prepared dim coordinates self.prepared_dim_0_metadata = _PreparedMetadata( - combined=sentinel.prepared_dim_0_metadata_combined, + combined=mocker.sentinel.prepared_dim_0_metadata_combined, src=None, tgt=None, ) - self.prepared_dim_0_points = sentinel.prepared_dim_0_points - self.prepared_dim_0_bounds = sentinel.prepared_dim_0_bounds + self.prepared_dim_0_points = mocker.sentinel.prepared_dim_0_points + self.prepared_dim_0_bounds = mocker.sentinel.prepared_dim_0_bounds self.prepared_dim_0_dims = (0,) - self.prepared_dim_0_coord = mock.Mock(metadata=None) - 
self.prepared_dim_0_container = mock.Mock( + self.prepared_dim_0_coord = mocker.Mock(metadata=None) + self.prepared_dim_0_container = mocker.Mock( return_value=self.prepared_dim_0_coord ) self.prepared_dim_0 = _PreparedItem( @@ -4329,15 +4365,15 @@ def setUp(self): ) prepared_category.items_dim.append(self.prepared_dim_0) self.prepared_dim_1_metadata = _PreparedMetadata( - combined=sentinel.prepared_dim_1_metadata_combined, + combined=mocker.sentinel.prepared_dim_1_metadata_combined, src=None, tgt=None, ) - self.prepared_dim_1_points = sentinel.prepared_dim_1_points - self.prepared_dim_1_bounds = sentinel.prepared_dim_1_bounds + self.prepared_dim_1_points = mocker.sentinel.prepared_dim_1_points + self.prepared_dim_1_bounds = mocker.sentinel.prepared_dim_1_bounds self.prepared_dim_1_dims = (1,) - self.prepared_dim_1_coord = mock.Mock(metadata=None) - self.prepared_dim_1_container = mock.Mock( + self.prepared_dim_1_coord = mocker.Mock(metadata=None) + self.prepared_dim_1_container = mocker.Mock( return_value=self.prepared_dim_1_coord ) self.prepared_dim_1 = _PreparedItem( @@ -4351,15 +4387,15 @@ def setUp(self): # prepared auxiliary coordinates self.prepared_aux_0_metadata = _PreparedMetadata( - combined=sentinel.prepared_aux_0_metadata_combined, + combined=mocker.sentinel.prepared_aux_0_metadata_combined, src=None, tgt=None, ) - self.prepared_aux_0_points = sentinel.prepared_aux_0_points - self.prepared_aux_0_bounds = sentinel.prepared_aux_0_bounds + self.prepared_aux_0_points = mocker.sentinel.prepared_aux_0_points + self.prepared_aux_0_bounds = mocker.sentinel.prepared_aux_0_bounds self.prepared_aux_0_dims = (0,) - self.prepared_aux_0_coord = mock.Mock(metadata=None) - self.prepared_aux_0_container = mock.Mock( + self.prepared_aux_0_coord = mocker.Mock(metadata=None) + self.prepared_aux_0_container = mocker.Mock( return_value=self.prepared_aux_0_coord ) self.prepared_aux_0 = _PreparedItem( @@ -4371,15 +4407,15 @@ def setUp(self): ) prepared_category.items_aux.append(self.prepared_aux_0) self.prepared_aux_1_metadata = _PreparedMetadata( - combined=sentinel.prepared_aux_1_metadata_combined, + combined=mocker.sentinel.prepared_aux_1_metadata_combined, src=None, tgt=None, ) - self.prepared_aux_1_points = sentinel.prepared_aux_1_points - self.prepared_aux_1_bounds = sentinel.prepared_aux_1_bounds + self.prepared_aux_1_points = mocker.sentinel.prepared_aux_1_points + self.prepared_aux_1_bounds = mocker.sentinel.prepared_aux_1_bounds self.prepared_aux_1_dims = (1,) - self.prepared_aux_1_coord = mock.Mock(metadata=None) - self.prepared_aux_1_container = mock.Mock( + self.prepared_aux_1_coord = mocker.Mock(metadata=None) + self.prepared_aux_1_container = mocker.Mock( return_value=self.prepared_aux_1_coord ) self.prepared_aux_1 = _PreparedItem( @@ -4393,15 +4429,15 @@ def setUp(self): # prepare scalar coordinates self.prepared_scalar_0_metadata = _PreparedMetadata( - combined=sentinel.prepared_scalar_0_metadata_combined, + combined=mocker.sentinel.prepared_scalar_0_metadata_combined, src=None, tgt=None, ) - self.prepared_scalar_0_points = sentinel.prepared_scalar_0_points - self.prepared_scalar_0_bounds = sentinel.prepared_scalar_0_bounds + self.prepared_scalar_0_points = mocker.sentinel.prepared_scalar_0_points + self.prepared_scalar_0_bounds = mocker.sentinel.prepared_scalar_0_bounds self.prepared_scalar_0_dims = () - self.prepared_scalar_0_coord = mock.Mock(metadata=None) - self.prepared_scalar_0_container = mock.Mock( + self.prepared_scalar_0_coord = mocker.Mock(metadata=None) + 
self.prepared_scalar_0_container = mocker.Mock( return_value=self.prepared_scalar_0_coord ) self.prepared_scalar_0 = _PreparedItem( @@ -4413,15 +4449,15 @@ def setUp(self): ) prepared_category.items_scalar.append(self.prepared_scalar_0) self.prepared_scalar_1_metadata = _PreparedMetadata( - combined=sentinel.prepared_scalar_1_metadata_combined, + combined=mocker.sentinel.prepared_scalar_1_metadata_combined, src=None, tgt=None, ) - self.prepared_scalar_1_points = sentinel.prepared_scalar_1_points - self.prepared_scalar_1_bounds = sentinel.prepared_scalar_1_bounds + self.prepared_scalar_1_points = mocker.sentinel.prepared_scalar_1_points + self.prepared_scalar_1_bounds = mocker.sentinel.prepared_scalar_1_bounds self.prepared_scalar_1_dims = () - self.prepared_scalar_1_coord = mock.Mock(metadata=None) - self.prepared_scalar_1_container = mock.Mock( + self.prepared_scalar_1_coord = mocker.Mock(metadata=None) + self.prepared_scalar_1_container = mocker.Mock( return_value=self.prepared_scalar_1_coord ) self.prepared_scalar_1 = _PreparedItem( @@ -4436,20 +4472,20 @@ def setUp(self): # prepared factories # prepared_factories = [] - self.aux_factory = sentinel.aux_factory - self.prepared_factory_container = mock.Mock(return_value=self.aux_factory) + self.aux_factory = mocker.sentinel.aux_factory + self.prepared_factory_container = mocker.Mock(return_value=self.aux_factory) self.prepared_factory_metadata_a = _PreparedMetadata( - combined=sentinel.prepared_factory_metadata_a_combined, + combined=mocker.sentinel.prepared_factory_metadata_a_combined, src=None, tgt=None, ) self.prepared_factory_metadata_b = _PreparedMetadata( - combined=sentinel.prepared_factory_metadata_b_combined, + combined=mocker.sentinel.prepared_factory_metadata_b_combined, src=None, tgt=None, ) self.prepared_factory_metadata_c = _PreparedMetadata( - combined=sentinel.prepared_factory_metadata_c_combined, + combined=mocker.sentinel.prepared_factory_metadata_c_combined, src=None, tgt=None, ) @@ -4464,125 +4500,121 @@ def setUp(self): ) prepared_factories.append(self.prepared_factory) self.prepared_factory_side_effect = ( - sentinel.prepared_factory_coord_a, - sentinel.prepared_factory_coord_b, - sentinel.prepared_factory_coord_c, + mocker.sentinel.prepared_factory_coord_a, + mocker.sentinel.prepared_factory_coord_b, + mocker.sentinel.prepared_factory_coord_c, ) self.m_coord.side_effect = self.prepared_factory_side_effect self.resolve.prepared_category = prepared_category self.resolve.prepared_factories = prepared_factories # Required to stop mock 'containers' failing in an 'issubclass' call. 
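+        # A patch made through the "mocker" fixture is undone automatically when
+        # the test finishes, so no explicit cleanup needs to be registered for
+        # the patched issubclass below.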
- self.patch("iris.common.resolve.issubclass", mock.Mock(return_value=False)) + mocker.patch("iris.common.resolve.issubclass", mocker.Mock(return_value=False)) def test_no_resolved_shape(self): self.resolve._broadcast_shape = None data = None emsg = "Cannot resolve resultant cube, as no candidate cubes have been provided" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve.cube(data) def test_bad_data_shape(self): emsg = "Cannot resolve resultant cube, as the provided data must have shape" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve.cube(self.bad_data) def test_bad_data_shape__inplace(self): self.resolve.lhs_cube = Cube(self.bad_data) emsg = "Cannot resolve resultant cube in-place" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): _ = self.resolve.cube(self.data, in_place=True) def _check(self): # check dim coordinate 0 - self.assertEqual(1, self.prepared_dim_0.container.call_count) + assert self.prepared_dim_0.container.call_count == 1 expected = [ mock.call(self.prepared_dim_0_points, bounds=self.prepared_dim_0_bounds) ] - self.assertEqual(expected, self.prepared_dim_0.container.call_args_list) - self.assertEqual( - self.prepared_dim_0_coord.metadata, - self.prepared_dim_0_metadata.combined, + assert self.prepared_dim_0.container.call_args_list == expected + assert ( + self.prepared_dim_0_metadata.combined == self.prepared_dim_0_coord.metadata ) # check dim coordinate 1 - self.assertEqual(1, self.prepared_dim_1.container.call_count) + assert self.prepared_dim_1.container.call_count == 1 expected = [ mock.call(self.prepared_dim_1_points, bounds=self.prepared_dim_1_bounds) ] - self.assertEqual(expected, self.prepared_dim_1.container.call_args_list) - self.assertEqual( - self.prepared_dim_1_coord.metadata, - self.prepared_dim_1_metadata.combined, + assert self.prepared_dim_1.container.call_args_list == expected + assert ( + self.prepared_dim_1_metadata.combined == self.prepared_dim_1_coord.metadata ) # check add_dim_coord - self.assertEqual(2, self.m_add_dim_coord.call_count) + assert self.m_add_dim_coord.call_count == 2 expected = [ mock.call(self.prepared_dim_0_coord, self.prepared_dim_0_dims), mock.call(self.prepared_dim_1_coord, self.prepared_dim_1_dims), ] - self.assertEqual(expected, self.m_add_dim_coord.call_args_list) + assert self.m_add_dim_coord.call_args_list == expected # check aux coordinate 0 - self.assertEqual(1, self.prepared_aux_0.container.call_count) + assert self.prepared_aux_0.container.call_count == 1 expected = [ mock.call(self.prepared_aux_0_points, bounds=self.prepared_aux_0_bounds) ] - self.assertEqual(expected, self.prepared_aux_0.container.call_args_list) - self.assertEqual( - self.prepared_aux_0_coord.metadata, - self.prepared_aux_0_metadata.combined, + assert self.prepared_aux_0.container.call_args_list == expected + assert ( + self.prepared_aux_0_metadata.combined == self.prepared_aux_0_coord.metadata ) # check aux coordinate 1 - self.assertEqual(1, self.prepared_aux_1.container.call_count) + assert self.prepared_aux_1.container.call_count == 1 expected = [ mock.call(self.prepared_aux_1_points, bounds=self.prepared_aux_1_bounds) ] - self.assertEqual(expected, self.prepared_aux_1.container.call_args_list) - self.assertEqual( - self.prepared_aux_1_coord.metadata, - self.prepared_aux_1_metadata.combined, + assert self.prepared_aux_1.container.call_args_list == expected + assert ( + 
self.prepared_aux_1_metadata.combined == self.prepared_aux_1_coord.metadata ) # check scalar coordinate 0 - self.assertEqual(1, self.prepared_scalar_0.container.call_count) + assert self.prepared_scalar_0.container.call_count == 1 expected = [ mock.call( self.prepared_scalar_0_points, bounds=self.prepared_scalar_0_bounds, ) ] - self.assertEqual(expected, self.prepared_scalar_0.container.call_args_list) - self.assertEqual( - self.prepared_scalar_0_coord.metadata, - self.prepared_scalar_0_metadata.combined, + assert self.prepared_scalar_0.container.call_args_list == expected + assert ( + self.prepared_scalar_0_metadata.combined + == self.prepared_scalar_0_coord.metadata ) # check scalar coordinate 1 - self.assertEqual(1, self.prepared_scalar_1.container.call_count) + assert self.prepared_scalar_1.container.call_count == 1 expected = [ mock.call( self.prepared_scalar_1_points, bounds=self.prepared_scalar_1_bounds, ) ] - self.assertEqual(expected, self.prepared_scalar_1.container.call_args_list) - self.assertEqual( - self.prepared_scalar_1_coord.metadata, - self.prepared_scalar_1_metadata.combined, + assert self.prepared_scalar_1.container.call_args_list == expected + assert ( + self.prepared_scalar_1_metadata.combined + == self.prepared_scalar_1_coord.metadata ) # check add_aux_coord - self.assertEqual(4, self.m_add_aux_coord.call_count) + assert self.m_add_aux_coord.call_count == 4 expected = [ mock.call(self.prepared_aux_0_coord, self.prepared_aux_0_dims), mock.call(self.prepared_aux_1_coord, self.prepared_aux_1_dims), mock.call(self.prepared_scalar_0_coord, self.prepared_scalar_0_dims), mock.call(self.prepared_scalar_1_coord, self.prepared_scalar_1_dims), ] - self.assertEqual(expected, self.m_add_aux_coord.call_args_list) + assert self.m_add_aux_coord.call_args_list == expected # check auxiliary factories - self.assertEqual(1, self.m_add_aux_factory.call_count) + assert self.m_add_aux_factory.call_count == 1 expected = [mock.call(self.aux_factory)] - self.assertEqual(expected, self.m_add_aux_factory.call_args_list) - self.assertEqual(1, self.prepared_factory_container.call_count) + assert self.m_add_aux_factory.call_args_list == expected + assert self.prepared_factory_container.call_count == 1 expected = [ mock.call( **{ @@ -4594,27 +4626,23 @@ def _check(self): } ) ] - self.assertEqual(expected, self.prepared_factory_container.call_args_list) - self.assertEqual(3, self.m_coord.call_count) + assert self.prepared_factory_container.call_args_list == expected + assert self.m_coord.call_count == 3 expected = [ mock.call(self.prepared_factory_metadata_a.combined), mock.call(self.prepared_factory_metadata_b.combined), mock.call(self.prepared_factory_metadata_c.combined), ] - self.assertEqual(expected, self.m_coord.call_args_list) + assert self.m_coord.call_args_list == expected def test_resolve(self): result = self.resolve.cube(self.data) - self.assertEqual(self.cube_metadata, result.metadata) + assert result.metadata == self.cube_metadata self._check() - self.assertIsNot(self.resolve.lhs_cube, result) + assert self.resolve.lhs_cube is not result def test_resolve__inplace(self): result = self.resolve.cube(self.data, in_place=True) - self.assertEqual(self.cube_metadata, result.metadata) + assert result.metadata == self.cube_metadata self._check() - self.assertIs(self.resolve.lhs_cube, result) - - -if __name__ == "__main__": - tests.main() + assert self.resolve.lhs_cube is result diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py 
b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index a148b6fdbd..7c8eaa4c2f 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -6,10 +6,11 @@ # import iris tests first so that some things can be initialised # before importing anything else. -import iris.tests as tests # isort:skip +from dataclasses import dataclass from cf_units import Unit import numpy as np +import pytest from iris._concatenate import _CubeSignature as CubeSignature from iris.coords import DimCoord @@ -17,65 +18,73 @@ from iris.util import new_axis -class Test__coordinate_dim_metadata_equality(tests.IrisTest): - def setUp(self): +class Test__coordinate_dim_metadata_equality: + @pytest.fixture() + def sample_data(self): + # Return a standard set of test items, wrapped in a data object + @dataclass + class SampleData: + series_inc: CubeSignature = None + series_inc_cube: Cube = None + series_dec: CubeSignature = None + series_dec_cube: Cube = None + scalar_cube: Cube = None + + data = SampleData() + nt = 10 - data = np.arange(nt, dtype=np.float32) - cube = Cube(data, standard_name="air_temperature", units="K") + cube_data = np.arange(nt, dtype=np.float32) + cube = Cube(cube_data, standard_name="air_temperature", units="K") # Temporal coordinate. t_units = Unit("hours since 1970-01-01 00:00:00", calendar="standard") t_coord = DimCoord(points=np.arange(nt), standard_name="time", units=t_units) cube.add_dim_coord(t_coord, 0) - # Increasing 1D time-series cube. - self.series_inc_cube = cube - self.series_inc = CubeSignature(self.series_inc_cube) + data.series_inc_cube = cube + data.series_inc = CubeSignature(data.series_inc_cube) # Decreasing 1D time-series cube. - self.series_dec_cube = self.series_inc_cube.copy() - self.series_dec_cube.remove_coord("time") + data.series_dec_cube = data.series_inc_cube.copy() + data.series_dec_cube.remove_coord("time") t_tmp = DimCoord( points=t_coord.points[::-1], standard_name="time", units=t_units ) - self.series_dec_cube.add_dim_coord(t_tmp, 0) - self.series_dec = CubeSignature(self.series_dec_cube) + data.series_dec_cube.add_dim_coord(t_tmp, 0) + data.series_dec = CubeSignature(data.series_dec_cube) # Scalar 0D time-series cube with scalar time coordinate. cube = Cube(0, standard_name="air_temperature", units="K") cube.add_aux_coord(DimCoord(points=nt, standard_name="time", units=t_units)) - self.scalar_cube = cube + data.scalar_cube = cube + return data - def test_scalar_non_common_axis(self): - scalar = CubeSignature(self.scalar_cube) - self.assertNotEqual(self.series_inc.dim_metadata, scalar.dim_metadata) - self.assertNotEqual(self.series_dec.dim_metadata, scalar.dim_metadata) + def test_scalar_non_common_axis(self, sample_data): + scalar = CubeSignature(sample_data.scalar_cube) + assert sample_data.series_inc.dim_metadata != scalar.dim_metadata + assert sample_data.series_dec.dim_metadata != scalar.dim_metadata - def test_1d_single_value_common_axis(self): + def test_1d_single_value_common_axis(self, sample_data): # Manually promote scalar time cube to be a 1d cube. 
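+        # new_axis promotes the scalar "time" coordinate to a new length-1
+        # dimension, so the promoted cube's dim_metadata is expected to match
+        # the 1-D time-series signatures compared below.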
- single = CubeSignature(new_axis(self.scalar_cube, "time")) - self.assertEqual(self.series_inc.dim_metadata, single.dim_metadata) - self.assertEqual(self.series_dec.dim_metadata, single.dim_metadata) + single = CubeSignature(new_axis(sample_data.scalar_cube, "time")) + assert sample_data.series_inc.dim_metadata == single.dim_metadata + assert sample_data.series_dec.dim_metadata == single.dim_metadata - def test_increasing_common_axis(self): - series_inc = self.series_inc - series_dec = self.series_dec - self.assertEqual(series_inc.dim_metadata, series_inc.dim_metadata) - self.assertNotEqual(series_inc.dim_metadata, series_dec.dim_metadata) + def test_increasing_common_axis(self, sample_data): + series_inc = sample_data.series_inc + series_dec = sample_data.series_dec + assert series_inc.dim_metadata == series_inc.dim_metadata + assert series_inc.dim_metadata != series_dec.dim_metadata - def test_decreasing_common_axis(self): - series_inc = self.series_inc - series_dec = self.series_dec - self.assertNotEqual(series_dec.dim_metadata, series_inc.dim_metadata) - self.assertEqual(series_dec.dim_metadata, series_dec.dim_metadata) + def test_decreasing_common_axis(self, sample_data): + series_inc = sample_data.series_inc + series_dec = sample_data.series_dec + assert series_dec.dim_metadata != series_inc.dim_metadata + assert series_dec.dim_metadata == series_dec.dim_metadata - def test_circular(self): - series_inc = self.series_inc - circular_cube = self.series_inc_cube.copy() + def test_circular(self, sample_data): + series_inc = sample_data.series_inc + circular_cube = sample_data.series_inc_cube.copy() circular_cube.coord("time").circular = True circular = CubeSignature(circular_cube) - self.assertNotEqual(circular.dim_metadata, series_inc.dim_metadata) - self.assertEqual(circular.dim_metadata, circular.dim_metadata) - - -if __name__ == "__main__": - tests.main() + assert circular.dim_metadata != series_inc.dim_metadata + assert circular.dim_metadata == circular.dim_metadata diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 59312a542d..448ffb5e7b 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -4,13 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._concatenate.concatenate.py`.""" -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - import cf_units import numpy as np import numpy.ma as ma +import pytest from iris._concatenate import concatenate from iris._lazy_data import as_lazy_data @@ -18,12 +15,19 @@ import iris.coords import iris.cube from iris.exceptions import ConcatenateError +import iris.warnings -class TestEpoch(tests.IrisTest): - def simple_1d_time_cubes(self, reftimes, coords_points): - cubes = [] +class TestEpoch: + @pytest.fixture() + def simple_1d_time_cubes(self): + reftimes = [ + "hours since 1970-01-01 00:00:00", + "hours since 1970-01-01 00:00:00", + ] + coords_points = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]] data_points = [273, 275, 278, 277, 274] + cubes = [] for reftime, coord_points in zip(reftimes, coords_points): cube = iris.cube.Cube( np.array(data_points, dtype=np.float32), @@ -40,20 +44,21 @@ def simple_1d_time_cubes(self, reftimes, coords_points): cubes.append(cube) return cubes - def test_concat_1d_with_same_time_units(self): - reftimes = [ - "hours since 1970-01-01 00:00:00", - "hours since 1970-01-01 00:00:00", - ] - coords_points = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]] - cubes = self.simple_1d_time_cubes(reftimes, coords_points) - result = concatenate(cubes) - self.assertEqual(len(result), 1) - self.assertEqual(result[0].shape, (10,)) + def test_concat_1d_with_same_time_units(self, simple_1d_time_cubes): + result = concatenate(simple_1d_time_cubes) + assert len(result) == 1 + assert result[0].shape == (10,) + +class _MessagesMixin: + @pytest.fixture() + def placeholder(self): + # Shim to allow sample_cubes to have identical signature in both parent and subclasses + return [] -class _MessagesMixin(tests.IrisTest): - def setUp(self): + @pytest.fixture() + def sample_cubes(self, placeholder): + # Construct and return a pair of identical cubes data = np.arange(24, dtype=np.float32).reshape(2, 3, 4) cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") # Time coord @@ -106,127 +111,103 @@ def setUp(self): cube.add_aux_coord(sigma, ()) cube.add_aux_coord(orog, ()) cube.add_aux_factory(HybridHeightFactory(delta, sigma, orog)) - self.cube = cube + # Return a list with two identical cubes + return [cube, cube.copy()] + def test_definition_difference_message(self, sample_cubes): + sample_cubes[1].units = "1" + exc_regexp = "Cube metadata differs for phenomenon:" + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) -class TestMessages(_MessagesMixin): - def setUp(self): - super().setUp() - def test_dim_coords_same_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() +class TestMessages(_MessagesMixin): + def test_dim_coords_same_message(self, sample_cubes): exc_regexp = "Cannot find an axis to concatenate over for phenomenon *" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_definition_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.units = "1" + def test_definition_difference_message(self, sample_cubes): + sample_cubes[1].units = "1" exc_regexp = "Cube metadata differs for phenomenon: *" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_dimensions_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - 
cube_2.remove_coord("latitude") + def test_dimensions_difference_message(self, sample_cubes): + sample_cubes[1].remove_coord("latitude") exc_regexp = "Dimension coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_dimensions_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("latitude").long_name = "bob" + def test_dimensions_metadata_difference_message(self, sample_cubes): + sample_cubes[1].coord("latitude").long_name = "bob" exc_regexp = "Dimension coordinates metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_aux_coords_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_coord("foo") + def test_aux_coords_difference_message(self, sample_cubes): + sample_cubes[1].remove_coord("foo") exc_regexp = "Auxiliary coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_aux_coords_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("foo").units = "m" + def test_aux_coords_metadata_difference_message(self, sample_cubes): + sample_cubes[1].coord("foo").units = "m" exc_regexp = "Auxiliary coordinates metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_scalar_coords_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_coord("height") + def test_scalar_coords_difference_message(self, sample_cubes): + sample_cubes[1].remove_coord("height") exc_regexp = "Scalar coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_scalar_coords_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("height").long_name = "alice" + def test_scalar_coords_metadata_difference_message(self, sample_cubes): + sample_cubes[1].coord("height").long_name = "alice" exc_regexp = "Scalar coordinates values or metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_cell_measure_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_cell_measure("bar") + def test_cell_measure_difference_message(self, sample_cubes): + sample_cubes[1].remove_cell_measure("bar") exc_regexp = "Cell measures differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_cell_measure_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - 
cube_2.cell_measure("bar").units = "m" + def test_cell_measure_metadata_difference_message(self, sample_cubes): + sample_cubes[1].cell_measure("bar").units = "m" exc_regexp = "Cell measures metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_ancillary_variable_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_ancillary_variable("baz") + def test_ancillary_variable_difference_message(self, sample_cubes): + sample_cubes[1].remove_ancillary_variable("baz") exc_regexp = "Ancillary variables differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_ancillary_variable_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.ancillary_variable("baz").units = "m" + def test_ancillary_variable_metadata_difference_message(self, sample_cubes): + sample_cubes[1].ancillary_variable("baz").units = "m" exc_regexp = "Ancillary variables metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_derived_coord_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_aux_factory(cube_2.aux_factories[0]) + def test_derived_coord_difference_message(self, sample_cubes): + sample_cubes[1].remove_aux_factory(sample_cubes[1].aux_factories[0]) exc_regexp = "Derived coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_derived_coord_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.aux_factories[0].units = "km" + def test_derived_coord_metadata_difference_message(self, sample_cubes): + sample_cubes[1].aux_factories[0].units = "km" exc_regexp = "Derived coordinates metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_ndim_difference_message(self): - cube_1 = self.cube - cube_2 = iris.cube.Cube( + def test_ndim_difference_message(self, sample_cubes): + # Replace cube#2 with an entirely different thing + sample_cubes[1] = iris.cube.Cube( np.arange(5, dtype=np.float32), standard_name="air_temperature", units="K", @@ -236,73 +217,71 @@ def test_ndim_difference_message(self): standard_name="longitude", units="degrees", ) - cube_2.add_dim_coord(x_coord, 0) + sample_cubes[1].add_dim_coord(x_coord, 0) exc_regexp = "Data dimensions differ: [0-9] != [0-9]" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_datatype_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.data.dtype = np.float64 + def test_datatype_difference_message(self, sample_cubes): + sample_cubes[1].data.dtype = np.float64 exc_regexp = "Data types 
differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) - def test_dim_coords_overlap_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("time").points = np.arange(1, 3, dtype=np.float32) + def test_dim_coords_overlap_message(self, sample_cubes): + sample_cubes[1].coord("time").points = np.arange(1, 3, dtype=np.float32) exc_regexp = "Found cubes with overlap on concatenate axis" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) class TestNonMetadataMessages(_MessagesMixin): - def setUp(self): - super().setUp() - cube_2 = self.cube.copy() - cube_2.coord("time").points = cube_2.coord("time").points + 2 - self.cube_2 = cube_2 + parent_cubes = _MessagesMixin.sample_cubes + + @pytest.fixture() + def sample_cubes(self, parent_cubes): + coord = parent_cubes[1].coord("time") + parent_cubes[1].replace_coord(coord.copy(points=coord.points + 2)) + return parent_cubes - def test_aux_coords_diff_message(self): - self.cube_2.coord("foo").points = [3, 4, 5] + def test_aux_coords_diff_message(self, sample_cubes): + sample_cubes[1].coord("foo").points = [3, 4, 5] exc_regexp = "Auxiliary coordinates are unequal for phenomenon * " - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([self.cube, self.cube_2], True) - with self.assertWarnsRegex(iris.warnings.IrisUserWarning, exc_regexp): - _ = concatenate([self.cube, self.cube_2], False) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) + with pytest.warns(iris.warnings.IrisUserWarning, match=exc_regexp): + _ = concatenate(sample_cubes, False) - def test_cell_measures_diff_message(self): - self.cube_2.cell_measure("bar").data = [3, 4, 5] + def test_cell_measures_diff_message(self, sample_cubes): + sample_cubes[1].cell_measure("bar").data = [3, 4, 5] exc_regexp = "Cell measures are unequal for phenomenon * " - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([self.cube, self.cube_2], True) - with self.assertWarnsRegex(iris.warnings.IrisUserWarning, exc_regexp): - _ = concatenate([self.cube, self.cube_2], False) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) + with pytest.warns(iris.warnings.IrisUserWarning, match=exc_regexp): + _ = concatenate(sample_cubes, False) - def test_ancillary_variable_diff_message(self): - self.cube_2.ancillary_variable("baz").data = [3, 4, 5] + def test_ancillary_variable_diff_message(self, sample_cubes): + sample_cubes[1].ancillary_variable("baz").data = [3, 4, 5] exc_regexp = "Ancillary variables are unequal for phenomenon * " - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([self.cube, self.cube_2], True) - with self.assertWarnsRegex(iris.warnings.IrisUserWarning, exc_regexp): - _ = concatenate([self.cube, self.cube_2], False) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) + with pytest.warns(iris.warnings.IrisUserWarning, match=exc_regexp): + _ = concatenate(sample_cubes, False) - def test_derived_coords_diff_message(self): - self.cube_2.aux_factories[0].update(self.cube_2.coord("sigma"), None) + def test_derived_coords_diff_message(self, sample_cubes): + 
sample_cubes[1].aux_factories[0].update(sample_cubes[1].coord("sigma"), None) exc_regexp = "Derived coordinates are unequal for phenomenon * " - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([self.cube, self.cube_2], True) - with self.assertWarnsRegex(iris.warnings.IrisUserWarning, exc_regexp): - _ = concatenate([self.cube, self.cube_2], False) + with pytest.raises(ConcatenateError, match=exc_regexp): + _ = concatenate(sample_cubes, True) + with pytest.warns(iris.warnings.IrisUserWarning, match=exc_regexp): + _ = concatenate(sample_cubes, False) -class TestOrder(tests.IrisTest): +class TestOrder: def _make_cube(self, points, bounds=None): nx = 4 data = np.arange(len(points) * nx).reshape(len(points), nx) @@ -317,147 +296,154 @@ def test_asc_points(self): top = self._make_cube([10, 30, 50, 70, 90]) bottom = self._make_cube([-90, -70, -50, -30, -10]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_asc_bounds(self): top = self._make_cube([22.5, 67.5], [[0, 45], [45, 90]]) bottom = self._make_cube([-67.5, -22.5], [[-90, -45], [-45, 0]]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_asc_points_with_singleton_ordered(self): top = self._make_cube([5]) bottom = self._make_cube([15, 25]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_asc_points_with_singleton_unordered(self): top = self._make_cube([25]) bottom = self._make_cube([5, 15]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_asc_bounds_with_singleton_ordered(self): top = self._make_cube([5], [[0, 10]]) bottom = self._make_cube([15, 25], [[10, 20], [20, 30]]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_asc_bounds_with_singleton_unordered(self): top = self._make_cube([25], [[20, 30]]) bottom = self._make_cube([5, 15], [[0, 10], [10, 20]]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_desc_points(self): top = self._make_cube([90, 70, 50, 30, 10]) bottom = self._make_cube([-10, -30, -50, -70, -90]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_desc_bounds(self): top = self._make_cube([67.5, 22.5], [[90, 45], [45, 0]]) bottom = self._make_cube([-22.5, -67.5], [[0, -45], [-45, -90]]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_desc_points_with_singleton_ordered(self): top = self._make_cube([25]) bottom = self._make_cube([15, 5]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_desc_points_with_singleton_unordered(self): top = self._make_cube([5]) bottom = self._make_cube([25, 15]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_desc_bounds_with_singleton_ordered(self): top = self._make_cube([25], [[30, 20]]) bottom = self._make_cube([15, 5], [[20, 10], [10, 0]]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def test_desc_bounds_with_singleton_unordered(self): top = self._make_cube([5], [[10, 0]]) bottom = self._make_cube([25, 15], [[30, 20], [20, 10]]) result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) + assert len(result) == 1 def 
test_points_all_singleton(self): top = self._make_cube([5]) bottom = self._make_cube([15]) result1 = concatenate([top, bottom]) result2 = concatenate([bottom, top]) - self.assertEqual(len(result1), 1) - self.assertEqual(len(result2), 1) - self.assertEqual(result1, result2) + assert len(result1) == 1 + assert result1 == result2 def test_asc_bounds_all_singleton(self): top = self._make_cube([5], [0, 10]) bottom = self._make_cube([15], [10, 20]) result1 = concatenate([top, bottom]) result2 = concatenate([bottom, top]) - self.assertEqual(len(result1), 1) - self.assertEqual(len(result2), 1) - self.assertEqual(result1, result2) + assert len(result1) == 1 + assert result1 == result2 def test_desc_bounds_all_singleton(self): top = self._make_cube([5], [10, 0]) bottom = self._make_cube([15], [20, 10]) result1 = concatenate([top, bottom]) result2 = concatenate([bottom, top]) - self.assertEqual(len(result1), 1) - self.assertEqual(len(result2), 1) - self.assertEqual(result1, result2) - - -class TestConcatenate__dask(tests.IrisTest): - def build_lazy_cube(self, points, bounds=None, nx=4, aux_coords=False): - data = np.arange(len(points) * nx).reshape(len(points), nx) - data = as_lazy_data(data) - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - lat = iris.coords.DimCoord(points, "latitude", bounds=bounds) - lon = iris.coords.DimCoord(np.arange(nx), "longitude") - cube.add_dim_coord(lat, 0) - cube.add_dim_coord(lon, 1) - if aux_coords: - bounds = np.arange(len(points) * nx * 4).reshape(len(points), nx, 4) - bounds = as_lazy_data(bounds) - aux_coord = iris.coords.AuxCoord(data, var_name="aux_coord", bounds=bounds) - cube.add_aux_coord(aux_coord, (0, 1)) - return cube + assert len(result1) == 1 + assert result1 == result2 + + +class TestConcatenate__dask: + @pytest.fixture() + def sample_lazy_cubes(self): + # Make a pair of concatenatable cubes, with dim points [1, 2] and [3, 4, 5] + def build_lazy_cube(points): + nx = 4 + data = np.arange(len(points) * nx).reshape(len(points), nx) + data = as_lazy_data(data) + cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") + lat = iris.coords.DimCoord(points, "latitude") + lon = iris.coords.DimCoord(np.arange(nx), "longitude") + cube.add_dim_coord(lat, 0) + cube.add_dim_coord(lon, 1) + return cube + + c1 = build_lazy_cube([1, 2]) + c2 = build_lazy_cube([3, 4, 5]) + return c1, c2 + + @staticmethod + def add_sample_auxcoord(cube): + # Augment a test cube by adding an aux-coord on the concatenation dimension + n_points, nx = cube.shape + bounds = np.arange(n_points * nx * 4).reshape(n_points, nx, 4) + bounds = as_lazy_data(bounds) + aux_coord = iris.coords.AuxCoord( + cube.core_data(), + bounds=bounds, + var_name="aux_coord", + ) + cube.add_aux_coord(aux_coord, (0, 1)) - def test_lazy_concatenate(self): - c1 = self.build_lazy_cube([1, 2]) - c2 = self.build_lazy_cube([3, 4, 5]) - (cube,) = concatenate([c1, c2]) - self.assertTrue(cube.has_lazy_data()) - self.assertFalse(ma.isMaskedArray(cube.data)) + def test_lazy_concatenate(self, sample_lazy_cubes): + (cube,) = concatenate(sample_lazy_cubes) + assert cube.has_lazy_data() + assert not ma.isMaskedArray(cube.data) - def test_lazy_concatenate_aux_coords(self): - c1 = self.build_lazy_cube([1, 2], aux_coords=True) - c2 = self.build_lazy_cube([3, 4, 5], aux_coords=True) + def test_lazy_concatenate_aux_coords(self, sample_lazy_cubes): + c1, c2 = sample_lazy_cubes + for cube in (c1, c2): + self.add_sample_auxcoord(cube) (result,) = concatenate([c1, c2]) - 
self.assertTrue(c1.coord("aux_coord").has_lazy_points()) - self.assertTrue(c1.coord("aux_coord").has_lazy_bounds()) + assert c1.coord("aux_coord").has_lazy_points() + assert c1.coord("aux_coord").has_lazy_bounds() - self.assertTrue(c2.coord("aux_coord").has_lazy_points()) - self.assertTrue(c2.coord("aux_coord").has_lazy_bounds()) + assert c2.coord("aux_coord").has_lazy_points() + assert c2.coord("aux_coord").has_lazy_bounds() - self.assertTrue(result.coord("aux_coord").has_lazy_points()) - self.assertTrue(result.coord("aux_coord").has_lazy_bounds()) + assert result.coord("aux_coord").has_lazy_points() + assert result.coord("aux_coord").has_lazy_bounds() - def test_lazy_concatenate_masked_array_mixed_deferred(self): - c1 = self.build_lazy_cube([1, 2]) - c2 = self.build_lazy_cube([3, 4, 5]) + def test_lazy_concatenate_masked_array_mixed_deferred(self, sample_lazy_cubes): + c1, c2 = sample_lazy_cubes c2.data = np.ma.masked_greater(c2.data, 3) (cube,) = concatenate([c1, c2]) - self.assertTrue(cube.has_lazy_data()) - self.assertTrue(ma.isMaskedArray(cube.data)) - - -if __name__ == "__main__": - tests.main() + assert cube.has_lazy_data() + assert ma.isMaskedArray(cube.data) diff --git a/lib/iris/tests/unit/config/test_NetCDF.py b/lib/iris/tests/unit/config/test_NetCDF.py index 5b691a1dc3..7469ca8c79 100644 --- a/lib/iris/tests/unit/config/test_NetCDF.py +++ b/lib/iris/tests/unit/config/test_NetCDF.py @@ -4,41 +4,40 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.config.NetCDF` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - +import re import warnings +import pytest + import iris.config -class Test(tests.IrisTest): - def setUp(self): - self.options = iris.config.NetCDF() +@pytest.fixture +def options(): + return iris.config.NetCDF() + + +def test_basic(options): + assert not options.conventions_override - def test_basic(self): - self.assertFalse(self.options.conventions_override) - def test_enabled(self): - self.options.conventions_override = True - self.assertTrue(self.options.conventions_override) +def test_enabled(options): + options.conventions_override = True + assert options.conventions_override - def test_bad_value(self): - # A bad value should be ignored and replaced with the default value. - bad_value = "wibble" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - self.options.conventions_override = bad_value - self.assertFalse(self.options.conventions_override) - exp_wmsg = "Attempting to set invalid value {!r}".format(bad_value) - self.assertRegex(str(w[0].message), exp_wmsg) - def test__contextmgr(self): - with self.options.context(conventions_override=True): - self.assertTrue(self.options.conventions_override) - self.assertFalse(self.options.conventions_override) +def test_bad_value(options): + # A bad value should be ignored and replaced with the default value. 
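
The test_NetCDF.py hunk above swaps a TestCase.setUp for a module-level fixture that pytest injects into each test by argument name. A minimal, self-contained sketch of that pattern, using a stand-in options class rather than iris.config.NetCDF:

import pytest


class _Options:
    # Stand-in options object for illustration; the real iris.config.NetCDF
    # is not needed to show the fixture mechanics.
    def __init__(self):
        self.conventions_override = False


@pytest.fixture
def options():
    # Function-scoped by default: each test receives a fresh instance, just
    # as setUp() used to build one per test.
    return _Options()


def test_default(options):
    assert not options.conventions_override


def test_enabled(options):
    options.conventions_override = True
    assert options.conventions_override
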
+ bad_value = "wibble" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + options.conventions_override = bad_value + assert not options.conventions_override + exp_wmsg = "Attempting to set invalid value {!r}".format(bad_value) + assert re.match(exp_wmsg, str(w[0].message)) -if __name__ == "__main__": - tests.main() +def test__contextmgr(options): + with options.context(conventions_override=True): + assert options.conventions_override + assert not options.conventions_override diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 8c36240fb6..1f01efd90f 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -35,43 +35,44 @@ CoordinateNotFoundError, UnitConversionError, ) +from iris.tests import _shared_utils import iris.tests.stock as stock from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube, sample_meshcoord from iris.warnings import IrisUserWarning, IrisVagueMetadataWarning -class Test___init___data(tests.IrisTest): +class Test___init___data: def test_ndarray(self): # np.ndarray should be allowed through data = np.arange(12).reshape(3, 4) cube = Cube(data) - self.assertEqual(type(cube.data), np.ndarray) - self.assertArrayEqual(cube.data, data) + assert type(cube.data) is np.ndarray + _shared_utils.assert_array_equal(cube.data, data) def test_masked(self): # ma.MaskedArray should be allowed through data = ma.masked_greater(np.arange(12).reshape(3, 4), 1) cube = Cube(data) - self.assertEqual(type(cube.data), ma.MaskedArray) - self.assertMaskedArrayEqual(cube.data, data) + assert type(cube.data) is ma.MaskedArray + _shared_utils.assert_masked_array_equal(cube.data, data) def test_masked_no_mask(self): # ma.MaskedArray should be allowed through even if it has no mask data = ma.masked_array(np.arange(12).reshape(3, 4), False) cube = Cube(data) - self.assertEqual(type(cube.data), ma.MaskedArray) - self.assertMaskedArrayEqual(cube.data, data) + assert type(cube.data) is ma.MaskedArray + _shared_utils.assert_masked_array_equal(cube.data, data) def test_matrix(self): # Subclasses of np.ndarray should be coerced back to np.ndarray. # (Except for np.ma.MaskedArray.) data = np.matrix([[1, 2, 3], [4, 5, 6]]) cube = Cube(data) - self.assertEqual(type(cube.data), np.ndarray) - self.assertArrayEqual(cube.data, data) + assert type(cube.data) is np.ndarray + _shared_utils.assert_array_equal(cube.data, data) -class Test_data_dtype_fillvalue(tests.IrisTest): +class Test_data_dtype_fillvalue: def _sample_data(self, dtype=("f4"), masked=False, fill_value=None, lazy=False): data = np.arange(6).reshape((2, 3)) dtype = np.dtype(dtype) @@ -94,74 +95,74 @@ def _sample_cube(self, dtype=("f4"), masked=False, fill_value=None, lazy=False): def test_realdata_change(self): # Check re-assigning real data. cube = self._sample_cube() - self.assertEqual(cube.dtype, np.float32) + assert cube.dtype == np.float32 new_dtype = np.dtype("i4") new_data = self._sample_data(dtype=new_dtype) cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) + assert cube.core_data() is new_data + assert cube.dtype == new_dtype def test_realmaskdata_change(self): # Check re-assigning real masked data. 
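
Two equivalent ways of asserting on a warning's text appear in these conversions: recording with warnings.catch_warnings plus re.match, or pytest.warns with match=. Both are shown in this standalone sketch; the warning-raising helper is invented for illustration only:

import re
import warnings

import pytest


def _set_value(value):
    # Invented helper that warns on a bad value.
    warnings.warn("Attempting to set invalid value {!r}".format(value))


def test_warning_recorded_manually():
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        _set_value("wibble")
    assert re.match("Attempting to set invalid value 'wibble'", str(w[0].message))


def test_warning_via_pytest_warns():
    # pytest.warns(..., match=...) performs the same check more compactly.
    with pytest.warns(UserWarning, match="invalid value"):
        _set_value("wibble")
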
cube = self._sample_cube(masked=True, fill_value=1234) - self.assertEqual(cube.dtype, np.float32) + assert cube.dtype == np.float32 new_dtype = np.dtype("i4") new_fill_value = 4321 new_data = self._sample_data( masked=True, fill_value=new_fill_value, dtype=new_dtype ) cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) - self.assertEqual(cube.data.fill_value, new_fill_value) + assert cube.core_data() is new_data + assert cube.dtype == new_dtype + assert cube.data.fill_value == new_fill_value def test_lazydata_change(self): # Check re-assigning lazy data. cube = self._sample_cube(lazy=True) - self.assertEqual(cube.core_data().dtype, np.float32) + assert cube.core_data().dtype == np.float32 new_dtype = np.dtype("f8") new_data = self._sample_data(new_dtype, lazy=True) cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) + assert cube.core_data() is new_data + assert cube.dtype == new_dtype def test_lazymaskdata_change(self): # Check re-assigning lazy masked data. cube = self._sample_cube(masked=True, fill_value=1234, lazy=True) - self.assertEqual(cube.core_data().dtype, np.float32) + assert cube.core_data().dtype == np.float32 new_dtype = np.dtype("f8") new_fill_value = 4321 new_data = self._sample_data( dtype=new_dtype, masked=True, fill_value=new_fill_value, lazy=True ) cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) - self.assertEqual(cube.data.fill_value, new_fill_value) + assert cube.core_data() is new_data + assert cube.dtype == new_dtype + assert cube.data.fill_value == new_fill_value def test_lazydata_realise(self): # Check touching lazy data. cube = self._sample_cube(lazy=True) data = cube.data - self.assertIs(cube.core_data(), data) - self.assertEqual(cube.dtype, np.float32) + assert cube.core_data() is data + assert cube.dtype == np.float32 def test_lazymaskdata_realise(self): # Check touching masked lazy data. 
fill_value = 27.3 cube = self._sample_cube(masked=True, fill_value=fill_value, lazy=True) data = cube.data - self.assertIs(cube.core_data(), data) - self.assertEqual(cube.dtype, np.float32) - self.assertEqual(data.fill_value, np.float32(fill_value)) + assert cube.core_data() is data + assert cube.dtype == np.float32 + assert data.fill_value == np.float32(fill_value) def test_realmaskedconstantint_realise(self): masked_data = ma.masked_array([666], mask=True) masked_constant = masked_data[0] cube = Cube(masked_constant) data = cube.data - self.assertTrue(ma.isMaskedArray(data)) - self.assertNotIsInstance(data, ma.core.MaskedConstant) + assert ma.isMaskedArray(data) + assert not isinstance(data, ma.core.MaskedConstant) def test_lazymaskedconstantint_realise(self): dtype = np.dtype("i2") @@ -170,8 +171,8 @@ def test_lazymaskedconstantint_realise(self): masked_constant_lazy = as_lazy_data(masked_constant) cube = Cube(masked_constant_lazy) data = cube.data - self.assertTrue(ma.isMaskedArray(data)) - self.assertNotIsInstance(data, ma.core.MaskedConstant) + assert ma.isMaskedArray(data) + assert not isinstance(data, ma.core.MaskedConstant) def test_lazydata___getitem__dtype(self): fill_value = 1234 @@ -185,24 +186,24 @@ def test_lazydata___getitem__dtype(self): lazy_masked_array = as_lazy_data(masked_array) cube = Cube(lazy_masked_array) subcube = cube[3:] - self.assertEqual(subcube.dtype, dtype) - self.assertEqual(subcube.data.fill_value, fill_value) + assert subcube.dtype == dtype + assert subcube.data.fill_value == fill_value -class Test_extract(tests.IrisTest): +class Test_extract: def test_scalar_cube_exists(self): # Ensure that extract is able to extract a scalar cube. constraint = iris.Constraint(name="a1") cube = Cube(1, long_name="a1") res = cube.extract(constraint) - self.assertIs(res, cube) + assert res is cube def test_scalar_cube_noexists(self): # Ensure that extract does not return a non-matching scalar cube. constraint = iris.Constraint(name="a2") cube = Cube(1, long_name="a1") res = cube.extract(constraint) - self.assertIs(res, None) + assert res is None def test_scalar_cube_coord_match(self): # Ensure that extract is able to extract a scalar cube according to @@ -212,7 +213,7 @@ def test_scalar_cube_coord_match(self): coord = iris.coords.AuxCoord(0, long_name="scalar_coord") cube.add_aux_coord(coord, None) res = cube.extract(constraint) - self.assertIs(res, cube) + assert res is cube def test_scalar_cube_coord_nomatch(self): # Ensure that extract is not extracting a scalar cube with scalar @@ -222,30 +223,30 @@ def test_scalar_cube_coord_nomatch(self): coord = iris.coords.AuxCoord(0, long_name="scalar_coord") cube.add_aux_coord(coord, None) res = cube.extract(constraint) - self.assertIs(res, None) + assert res is None def test_1d_cube_exists(self): # Ensure that extract is able to extract from a 1d cube. constraint = iris.Constraint(name="a1") cube = Cube([1], long_name="a1") res = cube.extract(constraint) - self.assertIs(res, cube) + assert res is cube def test_1d_cube_noexists(self): # Ensure that extract does not return a non-matching 1d cube. constraint = iris.Constraint(name="a2") cube = Cube([1], long_name="a1") res = cube.extract(constraint) - self.assertIs(res, None) + assert res is None -class Test_xml(tests.IrisTest): - def test_checksum_ignores_masked_values(self): +class Test_xml: + def test_checksum_ignores_masked_values(self, request): # Mask out an single element. 
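
Tests converted to take ``request`` are using pytest's built-in fixture describing the currently running test; helpers such as assert_CML accept it so they can key reference results off the test's identity. A small sketch of that idea, using a hypothetical helper rather than the iris one:

def _write_named_result(request, tmp_path, text):
    # Hypothetical helper: derive an output name from the running test's id,
    # loosely analogous to passing ``request`` into assert_CML above (the
    # real helper's internals are not part of this patch).
    path = tmp_path / (request.node.name + ".txt")
    path.write_text(text)
    return path


def test_request_identifies_the_test(request, tmp_path):
    path = _write_named_result(request, tmp_path, "result")
    assert path.name == "test_request_identifies_the_test.txt"
    assert path.read_text() == "result"
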
data = ma.arange(12).reshape(3, 4) data[1, 2] = ma.masked cube = Cube(data) - self.assertCML(cube) + _shared_utils.assert_CML(request, cube) # If we change the underlying value before masking it, the # checksum should be unaffected. @@ -253,21 +254,21 @@ def test_checksum_ignores_masked_values(self): data[1, 2] = 42 data[1, 2] = ma.masked cube = Cube(data) - self.assertCML(cube) + _shared_utils.assert_CML(request, cube) def test_byteorder_default(self): cube = Cube(np.arange(3)) - self.assertIn("byteorder", cube.xml()) + assert "byteorder" in cube.xml() def test_byteorder_false(self): cube = Cube(np.arange(3)) - self.assertNotIn("byteorder", cube.xml(byteorder=False)) + assert "byteorder" not in cube.xml(byteorder=False) def test_byteorder_true(self): cube = Cube(np.arange(3)) - self.assertIn("byteorder", cube.xml(byteorder=True)) + assert "byteorder" in cube.xml(byteorder=True) - def test_cell_measures(self): + def test_cell_measures(self, request): cube = stock.simple_3d_w_multidim_coords() cm_a = iris.coords.CellMeasure( np.zeros(cube.shape[-2:]), measure="area", units="1" @@ -280,19 +281,20 @@ def test_cell_measures(self): units="m3", ) cube.add_cell_measure(cm_v, (0, 1, 2)) - self.assertCML(cube) + _shared_utils.assert_CML(request, cube) - def test_ancils(self): + def test_ancils(self, request): cube = stock.simple_2d_w_multidim_coords() av = iris.coords.AncillaryVariable( np.zeros(cube.shape), long_name="xy", var_name="vxy", units="1" ) cube.add_ancillary_variable(av, (0, 1)) - self.assertCML(cube) + _shared_utils.assert_CML(request, cube) -class Test_collapsed__lazy(tests.IrisTest): - def setUp(self): +class Test_collapsed__lazy: + @pytest.fixture(autouse=True) + def _setup(self): self.data = np.arange(6.0).reshape((2, 3)) self.lazydata = as_lazy_data(self.data) cube = Cube(self.lazydata) @@ -304,50 +306,51 @@ def setUp(self): def test_dim0_lazy(self): cube_collapsed = self.cube.collapsed("y", MEAN) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, [1.5, 2.5, 3.5]) - self.assertFalse(cube_collapsed.has_lazy_data()) + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal(cube_collapsed.data, [1.5, 2.5, 3.5]) + assert not cube_collapsed.has_lazy_data() def test_dim0_lazy_weights_none(self): cube_collapsed = self.cube.collapsed("y", MEAN, weights=None) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, [1.5, 2.5, 3.5]) - self.assertFalse(cube_collapsed.has_lazy_data()) + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal(cube_collapsed.data, [1.5, 2.5, 3.5]) + assert not cube_collapsed.has_lazy_data() def test_dim1_lazy(self): cube_collapsed = self.cube.collapsed("x", MEAN) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, [1.0, 4.0]) - self.assertFalse(cube_collapsed.has_lazy_data()) + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal(cube_collapsed.data, [1.0, 4.0]) + assert not cube_collapsed.has_lazy_data() def test_dim1_lazy_weights_none(self): cube_collapsed = self.cube.collapsed("x", MEAN, weights=None) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, [1.0, 4.0]) - self.assertFalse(cube_collapsed.has_lazy_data()) + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal(cube_collapsed.data, [1.0, 4.0]) + assert not cube_collapsed.has_lazy_data() def test_multidims(self): 
# Check that MEAN works with multiple dims. cube_collapsed = self.cube.collapsed(("x", "y"), MEAN) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAllClose(cube_collapsed.data, 2.5) + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_all_close(cube_collapsed.data, 2.5) def test_multidims_weights_none(self): # Check that MEAN works with multiple dims. cube_collapsed = self.cube.collapsed(("x", "y"), MEAN, weights=None) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAllClose(cube_collapsed.data, 2.5) + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_all_close(cube_collapsed.data, 2.5) def test_non_lazy_aggregator(self): # An aggregator which doesn't have a lazy function should still work. dummy_agg = Aggregator("custom_op", lambda x, axis=None: np.mean(x, axis=axis)) result = self.cube.collapsed("x", dummy_agg) - self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual(result.data, np.mean(self.data, axis=1)) + assert not result.has_lazy_data() + _shared_utils.assert_array_equal(result.data, np.mean(self.data, axis=1)) -class Test_collapsed__multidim_weighted_with_arr(tests.IrisTest): - def setUp(self): +class Test_collapsed__multidim_weighted_with_arr: + @pytest.fixture(autouse=True) + def _multidim_arr_setup(self): self.data = np.arange(6.0).reshape((2, 3)) self.lazydata = as_lazy_data(self.data) # Test cubes with (same-valued) real and lazy data @@ -376,86 +379,102 @@ def test_weighted_fullweights_real_y(self): cube_collapsed = self.cube_real.collapsed( "y", MEAN, weights=self.full_weights_y ) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_y + ) + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_fullweights_lazy_y(self): # Full-shape weights, lazy data : Check lazy result, same values as real calc. cube_collapsed = self.cube_lazy.collapsed( "y", MEAN, weights=self.full_weights_y ) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_y + ) + assert cube_collapsed.units == "kg m-2 s-1" def test_weighted_1dweights_real_y(self): # 1-D weights, real data : Check same results as full-shape. cube_collapsed = self.cube_real.collapsed("y", MEAN, weights=self.y_weights) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_y + ) + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_1dweights_lazy_y(self): # 1-D weights, lazy data : Check lazy result, same values as real calc. 
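
The collapsed-cube classes above replace setUp with an autouse fixture, and the subclasses further down chain their setup by declaring the parent fixture as a parameter. A cut-down sketch of both moves, with invented numpy data standing in for iris cubes:

import numpy as np
import pytest


class TestMeanReal:
    @pytest.fixture(autouse=True)
    def _arr_setup(self):
        # Runs before every test in the class, like TestCase.setUp did.
        self.data = np.arange(6.0).reshape(2, 3)

    def test_mean_y(self):
        np.testing.assert_array_almost_equal(self.data.mean(axis=0), [1.5, 2.5, 3.5])


class TestMeanScaled(TestMeanReal):
    # Subclasses chain their setup by depending on the parent fixture,
    # mirroring the ``super().setUp()`` calls in the unittest version.
    @pytest.fixture(autouse=True)
    def _scaled_setup(self, _arr_setup):
        self.data = self.data * 2.0

    def test_mean_y(self):
        np.testing.assert_array_almost_equal(self.data.mean(axis=0), [3.0, 5.0, 7.0])
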
cube_collapsed = self.cube_lazy.collapsed("y", MEAN, weights=self.y_weights) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_y) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_y + ) + assert cube_collapsed.units == "kg m-2 s-1" def test_weighted_fullweights_real_x(self): # Full weights, real data, ** collapse X ** : as for 'y' case above cube_collapsed = self.cube_real.collapsed( "x", MEAN, weights=self.full_weights_x ) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_x + ) + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_fullweights_lazy_x(self): # Full weights, lazy data, ** collapse X ** : as for 'y' case above cube_collapsed = self.cube_lazy.collapsed( "x", MEAN, weights=self.full_weights_x ) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_x + ) + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_1dweights_real_x(self): # 1-D weights, real data, ** collapse X ** : as for 'y' case above cube_collapsed = self.cube_real.collapsed("x", MEAN, weights=self.x_weights) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_x + ) + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_1dweights_lazy_x(self): # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above cube_collapsed = self.cube_lazy.collapsed("x", MEAN, weights=self.x_weights) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, self.expected_result_x) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + assert cube_collapsed.has_lazy_data() + _shared_utils.assert_array_almost_equal( + cube_collapsed.data, self.expected_result_x + ) + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_sum_fullweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.full_weights_y) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_sum_fullweights_adapt_units_lazy_y(self): # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) cube_collapsed = self.cube_lazy.collapsed("y", SUM, 
weights=self.full_weights_y) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_sum_1dweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * 1 = kg m-2 s-1) # Note: the same test with lazy data fails: # https://github.com/SciTools/iris/issues/5083 cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.y_weights) - self.assertEqual(cube_collapsed.units, "kg m-2 s-1") - self.assertEqual(cube_collapsed.units.origin, "kg m-2 s-1") + assert cube_collapsed.units == "kg m-2 s-1" + assert cube_collapsed.units.origin == "kg m-2 s-1" def test_weighted_sum_with_unknown_units_real_y(self): # Check that units are adapted correctly ('unknown' * '1' = 'unknown') @@ -467,7 +486,7 @@ def test_weighted_sum_with_unknown_units_real_y(self): SUM, weights=self.full_weights_y, ) - self.assertEqual(cube_collapsed.units, "unknown") + assert cube_collapsed.units == "unknown" def test_weighted_sum_with_unknown_units_lazy_y(self): # Check that units are adapted correctly ('unknown' * '1' = 'unknown') @@ -479,7 +498,7 @@ def test_weighted_sum_with_unknown_units_lazy_y(self): SUM, weights=self.full_weights_y, ) - self.assertEqual(cube_collapsed.units, "unknown") + assert cube_collapsed.units == "unknown" # Simply redo the tests of Test_collapsed__multidim_weighted_with_arr with @@ -489,9 +508,8 @@ def test_weighted_sum_with_unknown_units_lazy_y(self): class Test_collapsed__multidim_weighted_with_cube( Test_collapsed__multidim_weighted_with_arr ): - def setUp(self): - super().setUp() - + @pytest.fixture(autouse=True) + def _multidim_cube_setup(self, _multidim_arr_setup): self.y_weights_original = self.y_weights self.full_weights_y_original = self.full_weights_y self.x_weights_original = self.x_weights @@ -507,27 +525,26 @@ def setUp(self): def test_weighted_sum_fullweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.full_weights_y) - self.assertEqual(cube_collapsed.units, "kg s-1") + assert cube_collapsed.units == "kg s-1" def test_weighted_sum_fullweights_adapt_units_lazy_y(self): # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) cube_collapsed = self.cube_lazy.collapsed("y", SUM, weights=self.full_weights_y) - self.assertEqual(cube_collapsed.units, "kg s-1") + assert cube_collapsed.units == "kg s-1" def test_weighted_sum_1dweights_adapt_units_real_y(self): # Check that units are adapted correctly (kg m-2 s-1 * m2 = kg s-1) # Note: the same test with lazy data fails: # https://github.com/SciTools/iris/issues/5083 cube_collapsed = self.cube_real.collapsed("y", SUM, weights=self.y_weights) - self.assertEqual(cube_collapsed.units, "kg s-1") + assert cube_collapsed.units == "kg s-1" class Test_collapsed__multidim_weighted_with_str( Test_collapsed__multidim_weighted_with_cube ): - def setUp(self): - super().setUp() - + @pytest.fixture(autouse=True) + def _multidim_str_setup(self, _multidim_cube_setup): self.full_weights_y = "full_y" self.full_weights_x = "full_x" self.y_weights = "y" @@ -561,17 +578,17 @@ def setUp(self): class Test_collapsed__multidim_weighted_with_dim_metadata( Test_collapsed__multidim_weighted_with_str ): - def setUp(self): - super().setUp() - + @pytest.fixture(autouse=True) + def _setup(self, _multidim_str_setup): self.full_weights_y = 
self.dim_metadata_full_y self.full_weights_x = self.dim_metadata_full_x self.y_weights = self.dim_metadata_1d_y self.x_weights = self.dim_metadata_1d_x -class Test_collapsed__cellmeasure_ancils(tests.IrisTest): - def setUp(self): +class Test_collapsed__cellmeasure_ancils: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(np.arange(6.0).reshape((2, 3))) for i_dim, name in enumerate(("y", "x")): npts = cube.shape[i_dim] @@ -585,19 +602,19 @@ def setUp(self): def test_ancillary_variables_and_cell_measures_kept(self): cube_collapsed = self.cube.collapsed("x", MEAN) - self.assertEqual( - cube_collapsed.ancillary_variables(), [self.ancillary_variable] - ) - self.assertEqual(cube_collapsed.cell_measures(), [self.cell_measure]) + assert cube_collapsed.ancillary_variables() == [self.ancillary_variable] + + assert cube_collapsed.cell_measures() == [self.cell_measure] def test_ancillary_variables_and_cell_measures_removed(self): cube_collapsed = self.cube.collapsed("y", MEAN) - self.assertEqual(cube_collapsed.ancillary_variables(), []) - self.assertEqual(cube_collapsed.cell_measures(), []) + assert cube_collapsed.ancillary_variables() == [] + assert cube_collapsed.cell_measures() == [] -class Test_collapsed__warning(tests.IrisTest): - def setUp(self): +class Test_collapsed__warning: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube([[1, 2], [1, 2]]) lat = DimCoord([1, 2], standard_name="latitude") lon = DimCoord([1, 2], standard_name="longitude") @@ -624,79 +641,80 @@ def _assert_warn_collapse_without_weight(self, coords, warn): # Ensure that warning is raised. msg = "Collapsing spatial coordinate {!r} without weighting" for coord in coords: - self.assertIn( - mock.call(msg.format(coord), category=IrisUserWarning), - warn.call_args_list, + assert ( + mock.call(msg.format(coord), category=IrisUserWarning) + in warn.call_args_list ) def _assert_nowarn_collapse_without_weight(self, coords, warn): # Ensure that warning is not raised. msg = "Collapsing spatial coordinate {!r} without weighting" for coord in coords: - self.assertNotIn(mock.call(msg.format(coord)), warn.call_args_list) + assert not mock.call(msg.format(coord)) in warn.call_args_list - def test_lat_lon_noweighted_aggregator(self): + def test_lat_lon_noweighted_aggregator(self, mocker): # Collapse latitude coordinate with unweighted aggregator. aggregator = mock.Mock(spec=Aggregator, lazy_func=None) aggregator.cell_method = None coords = ["latitude", "longitude"] - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator, somekeyword="bla") + warn = mocker.patch("warnings.warn") + self.cube.collapsed(coords, aggregator, somekeyword="bla") self._assert_nowarn_collapse_without_weight(coords, warn) - def test_lat_lon_weighted_aggregator(self): + def test_lat_lon_weighted_aggregator(self, mocker): # Collapse latitude coordinate with weighted aggregator without # providing weights. aggregator = self._aggregator(False) coords = ["latitude", "longitude"] - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator) + warn = mocker.patch("warnings.warn") + self.cube.collapsed(coords, aggregator) coords = [coord for coord in coords if "latitude" in coord] self._assert_warn_collapse_without_weight(coords, warn) - def test_lat_lon_weighted_aggregator_with_weights(self): + def test_lat_lon_weighted_aggregator_with_weights(self, mocker): # Collapse latitude coordinate with a weighted aggregators and # providing suitable weights. 
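
The ``mocker`` argument used just above comes from the pytest-mock plugin; mocker.patch applies for the duration of the test and is undone automatically, which is why the ``with mock.patch(...)`` blocks disappear. A minimal sketch, assuming pytest-mock is installed and using an invented warning message:

import warnings


def _collapse_without_weights():
    # Invented stand-in for code that warns when weights are missing.
    warnings.warn("Collapsing spatial coordinate 'latitude' without weighting")


def test_warning_issued(mocker):
    warn = mocker.patch("warnings.warn")
    _collapse_without_weights()
    warn.assert_called_once()
    assert "latitude" in warn.call_args.args[0]
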
weights = np.array([[0.1, 0.5], [0.3, 0.2]]) aggregator = self._aggregator(True) coords = ["latitude", "longitude"] - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator, weights=weights) + warn = mocker.patch("warnings.warn") + self.cube.collapsed(coords, aggregator, weights=weights) self._assert_nowarn_collapse_without_weight(coords, warn) - def test_lat_lon_weighted_aggregator_alt(self): + def test_lat_lon_weighted_aggregator_alt(self, mocker): # Collapse grid_latitude coordinate with weighted aggregator without # providing weights. Tests coordinate matching logic. aggregator = self._aggregator(False) coords = ["grid_latitude", "grid_longitude"] - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator) + warn = mocker.patch("warnings.warn") + self.cube.collapsed(coords, aggregator) coords = [coord for coord in coords if "latitude" in coord] self._assert_warn_collapse_without_weight(coords, warn) - def test_no_lat_weighted_aggregator_mixed(self): + def test_no_lat_weighted_aggregator_mixed(self, mocker): # Collapse grid_latitude and an unmatched coordinate (not lat/lon) # with weighted aggregator without providing weights. # Tests coordinate matching logic. aggregator = self._aggregator(False) coords = ["wibble"] - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator) + warn = mocker.patch("warnings.warn") + self.cube.collapsed(coords, aggregator) self._assert_nowarn_collapse_without_weight(coords, warn) -class Test_collapsed_coord_with_3_bounds(tests.IrisTest): - def setUp(self): +class Test_collapsed_coord_with_3_bounds: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube([1, 2]) bounds = [[0.0, 1.0, 2.0], [2.0, 3.0, 4.0]] @@ -716,59 +734,59 @@ def _assert_warn_cannot_check_contiguity(self, warn): f"bounds. Metadata may not be fully descriptive for " f"'{coord}'. Ignoring bounds." 
) - self.assertIn( - mock.call(msg, category=IrisVagueMetadataWarning), - warn.call_args_list, + assert ( + mock.call(msg, category=IrisVagueMetadataWarning) in warn.call_args_list ) def _assert_cube_as_expected(self, cube): """Ensure that cube data and coordinates are as expected.""" - self.assertArrayEqual(cube.data, np.array(3)) + _shared_utils.assert_array_equal(cube.data, np.array(3)) lat = cube.coord("latitude") - self.assertArrayAlmostEqual(lat.points, np.array([1.5])) - self.assertArrayAlmostEqual(lat.bounds, np.array([[1.0, 2.0]])) + _shared_utils.assert_array_almost_equal(lat.points, np.array([1.5])) + _shared_utils.assert_array_almost_equal(lat.bounds, np.array([[1.0, 2.0]])) lon = cube.coord("longitude") - self.assertArrayAlmostEqual(lon.points, np.array([1.5])) - self.assertArrayAlmostEqual(lon.bounds, np.array([[1.0, 2.0]])) + _shared_utils.assert_array_almost_equal(lon.points, np.array([1.5])) + _shared_utils.assert_array_almost_equal(lon.bounds, np.array([[1.0, 2.0]])) - def test_collapsed_lat_with_3_bounds(self): + def test_collapsed_lat_with_3_bounds(self, mocker): """Collapse latitude with 3 bounds.""" - with mock.patch("warnings.warn") as warn: - collapsed_cube = self.cube.collapsed("latitude", SUM) + warn = mocker.patch("warnings.warn") + collapsed_cube = self.cube.collapsed("latitude", SUM) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) - def test_collapsed_lon_with_3_bounds(self): + def test_collapsed_lon_with_3_bounds(self, mocker): """Collapse longitude with 3 bounds.""" - with mock.patch("warnings.warn") as warn: - collapsed_cube = self.cube.collapsed("longitude", SUM) + warn = mocker.patch("warnings.warn") + collapsed_cube = self.cube.collapsed("longitude", SUM) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) - def test_collapsed_lat_lon_with_3_bounds(self): + def test_collapsed_lat_lon_with_3_bounds(self, mocker): """Collapse latitude and longitude with 3 bounds.""" - with mock.patch("warnings.warn") as warn: - collapsed_cube = self.cube.collapsed(["latitude", "longitude"], SUM) + warn = mocker.patch("warnings.warn") + collapsed_cube = self.cube.collapsed(["latitude", "longitude"], SUM) self._assert_warn_cannot_check_contiguity(warn) self._assert_cube_as_expected(collapsed_cube) -class Test_summary(tests.IrisTest): - def setUp(self): +class Test_summary: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube(0) def test_cell_datetime_objects(self): self.cube.add_aux_coord(AuxCoord(42, units="hours since epoch")) summary = self.cube.summary() - self.assertIn("1970-01-02 18:00:00", summary) + assert "1970-01-02 18:00:00" in summary def test_scalar_str_coord(self): str_value = "foo" self.cube.add_aux_coord(AuxCoord(str_value)) summary = self.cube.summary() - self.assertIn(str_value, summary) + assert str_value in summary def test_ancillary_variable(self): cube = Cube(np.arange(6).reshape(2, 3)) @@ -779,7 +797,7 @@ def test_ancillary_variable(self): " Ancillary variables:\n" " status_flag x -" ) - self.assertEqual(expected_summary, cube.summary()) + assert expected_summary == cube.summary() def test_similar_coords(self): coord1 = AuxCoord(42, long_name="foo", attributes=dict(bar=np.array([2, 5]))) @@ -787,7 +805,7 @@ def test_similar_coords(self): coord2.attributes = dict(bar="baz") for coord in [coord1, coord2]: self.cube.add_aux_coord(coord) - self.assertIn("baz", self.cube.summary()) + assert "baz" in self.cube.summary() def test_long_components(self): # 
Check that components with long names 'stretch' the printout correctly. @@ -821,7 +839,7 @@ def test_long_components(self): # For lines with any columns : check that columns are where expected for col_ind in colon_inds: # Chop out chars before+after each expected column. - self.assertEqual(line[col_ind - 1 : col_ind + 2], " x ") + assert line[col_ind - 1 : col_ind + 2] == " x " # Finally also: compare old with new, but replacing new name and ignoring spacing differences def collapse_space(string): @@ -830,37 +848,38 @@ def collapse_space(string): string = string.replace(" ", " ") return string - self.assertEqual( - collapse_space(new_summary).replace(long_name, old_name), - collapse_space(original_summary), - ) + assert collapse_space(new_summary).replace( + long_name, old_name + ) == collapse_space(original_summary) -class Test_is_compatible(tests.IrisTest): - def setUp(self): +class Test_is_compatible: + @pytest.fixture(autouse=True) + def _setup(self): self.test_cube = Cube([1.0]) self.other_cube = self.test_cube.copy() def test_noncommon_array_attrs_compatible(self): # Non-common array attributes should be ok. self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3]) - self.assertTrue(self.test_cube.is_compatible(self.other_cube)) + assert self.test_cube.is_compatible(self.other_cube) def test_matching_array_attrs_compatible(self): # Matching array attributes should be ok. self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3]) self.other_cube.attributes["array_test"] = np.array([1.0, 2, 3]) - self.assertTrue(self.test_cube.is_compatible(self.other_cube)) + assert self.test_cube.is_compatible(self.other_cube) def test_different_array_attrs_incompatible(self): # Differing array attributes should make the cubes incompatible. self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3]) self.other_cube.attributes["array_test"] = np.array([1.0, 2, 777.7]) - self.assertFalse(self.test_cube.is_compatible(self.other_cube)) + assert not self.test_cube.is_compatible(self.other_cube) -class Test_rolling_window(tests.IrisTest): - def setUp(self): +class Test_rolling_window: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube(np.arange(6), units="kg") self.multi_dim_cube = Cube(np.arange(36).reshape(6, 6)) val_coord = DimCoord([0, 1, 2, 3, 4, 5], long_name="val", units="s") @@ -902,8 +921,8 @@ def test_string_coord(self): ), long_name="month", ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("month"), month_coord) + assert res_cube.coord("val") == val_coord + assert res_cube.coord("month") == month_coord def test_kwargs(self): # Rolling window with missing data not tolerated @@ -917,7 +936,7 @@ def test_kwargs(self): mask=[True, False, False, True, True], dtype=np.float64, ) - self.assertMaskedArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_masked_array_equal(expected_result, res_cube.data) def test_lazy(self): window = 2 @@ -925,58 +944,59 @@ def test_lazy(self): self.cube.data, mask=([True, False, False, False, True, False]) ) res_cube = self.cube.rolling_window("val", iris.analysis.MEAN, window, mdtol=0) - self.assertTrue(self.cube.has_lazy_data()) - self.assertTrue(res_cube.has_lazy_data()) + assert self.cube.has_lazy_data() + assert res_cube.has_lazy_data() expected_result = ma.array( [-99.0, 1.5, 2.5, -99.0, -99.0], mask=[True, False, False, True, True], dtype=np.float64, ) - self.assertMaskedArrayEqual(expected_result, res_cube.data) + _shared_utils.assert_masked_array_equal(expected_result, 
res_cube.data) def test_ancillary_variables_and_cell_measures_kept(self): res_cube = self.multi_dim_cube.rolling_window("val", self.mock_agg, 3) - self.assertEqual(res_cube.ancillary_variables(), [self.ancillary_variable]) - self.assertEqual(res_cube.cell_measures(), [self.cell_measure]) + assert res_cube.ancillary_variables() == [self.ancillary_variable] + assert res_cube.cell_measures() == [self.cell_measure] def test_ancillary_variables_and_cell_measures_removed(self): res_cube = self.multi_dim_cube.rolling_window("extra", self.mock_agg, 3) - self.assertEqual(res_cube.ancillary_variables(), []) - self.assertEqual(res_cube.cell_measures(), []) + assert res_cube.ancillary_variables() == [] + assert res_cube.cell_measures() == [] def test_weights_arr(self): weights = np.array([0, 0, 1, 0, 2]) res_cube = self.cube.rolling_window("val", SUM, 5, weights=weights) - np.testing.assert_array_equal(res_cube.data, [10, 13]) - self.assertEqual(res_cube.units, "kg") + _shared_utils.assert_array_equal(res_cube.data, [10, 13]) + assert res_cube.units == "kg" def test_weights_cube(self): weights = Cube([0, 0, 1, 0, 2], units="m2") res_cube = self.cube.rolling_window("val", SUM, 5, weights=weights) - np.testing.assert_array_equal(res_cube.data, [10, 13]) - self.assertEqual(res_cube.units, "kg m2") + _shared_utils.assert_array_equal(res_cube.data, [10, 13]) + assert res_cube.units == "kg m2" def test_weights_str(self): weights = "val" res_cube = self.cube.rolling_window("val", SUM, 6, weights=weights) - np.testing.assert_array_equal(res_cube.data, [55]) - self.assertEqual(res_cube.units, "kg s") + _shared_utils.assert_array_equal(res_cube.data, [55]) + assert res_cube.units == "kg s" def test_weights_dim_coord(self): weights = self.cube.coord("val") res_cube = self.cube.rolling_window("val", SUM, 6, weights=weights) - np.testing.assert_array_equal(res_cube.data, [55]) - self.assertEqual(res_cube.units, "kg s") + _shared_utils.assert_array_equal(res_cube.data, [55]) + assert res_cube.units == "kg s" -class Test_slices_dim_order(tests.IrisTest): +class Test_slices_dim_order: """Test the capability of iris.cube.Cube.slices(). Test the capability of iris.cube.Cube.slices(), including its ability to correctly re-order the dimensions. """ - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): """Setup a 4D iris cube, each dimension is length 1. The dimensions are; dim1: time @@ -1041,16 +1061,17 @@ def check_order(self, dim1, dim2, dim3, dim_to_remove): sliced_cube = next(self.cube.slices([dim1, dim2, dim3])) sliced_cube.remove_coord(dim_to_remove) expected_cube = self.expected_cube_setup(dim1, dim2, dim3) - self.assertEqual(sliced_cube, expected_cube) + assert sliced_cube == expected_cube def test_all_permutations(self): for perm in permutations(["time", "height", "latitude", "longitude"]): self.check_order(*perm) -@tests.skip_data -class Test_slices_over(tests.IrisTest): - def setUp(self): +@_shared_utils.skip_data +class Test_slices_over: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.realistic_4d()[:, :7, :10, :10] # Define expected iterators for 1D and 2D test cases. 
self.exp_iter_1d = range(len(self.cube.coord("model_level_number").points)) @@ -1063,30 +1084,30 @@ def test_1d_slice_coord_given(self): res = self.cube.slices_over(self.cube.coord("model_level_number")) for i, res_cube in zip(self.exp_iter_1d, res): expected = self.cube[:, i] - self.assertEqual(res_cube, expected) + assert res_cube == expected def test_1d_slice_nonexistent_coord_given(self): - with self.assertRaises(CoordinateNotFoundError): + with pytest.raises(CoordinateNotFoundError): _ = self.cube.slices_over(self.cube.coord("wibble")) def test_1d_slice_coord_name_given(self): res = self.cube.slices_over("model_level_number") for i, res_cube in zip(self.exp_iter_1d, res): expected = self.cube[:, i] - self.assertEqual(res_cube, expected) + assert res_cube == expected def test_1d_slice_nonexistent_coord_name_given(self): - with self.assertRaises(CoordinateNotFoundError): + with pytest.raises(CoordinateNotFoundError): _ = self.cube.slices_over("wibble") def test_1d_slice_dimension_given(self): res = self.cube.slices_over(1) for i, res_cube in zip(self.exp_iter_1d, res): expected = self.cube[:, i] - self.assertEqual(res_cube, expected) + assert res_cube == expected def test_1d_slice_nonexistent_dimension_given(self): - with self.assertRaisesRegex(ValueError, "iterator over a dimension"): + with pytest.raises(ValueError, match="iterator over a dimension"): _ = self.cube.slices_over(self.cube.ndim + 1) def test_2d_slice_coord_given(self): @@ -1100,10 +1121,10 @@ def test_2d_slice_coord_given(self): # Replace the dimensions not iterated over with spanning slices. indices[2] = indices[3] = slice(None) expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) + assert next(res) == expected def test_2d_slice_nonexistent_coord_given(self): - with self.assertRaises(CoordinateNotFoundError): + with pytest.raises(CoordinateNotFoundError): _ = self.cube.slices_over( [self.cube.coord("time"), self.cube.coord("wibble")] ) @@ -1117,10 +1138,10 @@ def test_2d_slice_coord_name_given(self): # Replace the dimensions not iterated over with spanning slices. indices[2] = indices[3] = slice(None) expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) + assert next(res) == expected def test_2d_slice_nonexistent_coord_name_given(self): - with self.assertRaises(CoordinateNotFoundError): + with pytest.raises(CoordinateNotFoundError): _ = self.cube.slices_over(["time", "wibble"]) def test_2d_slice_dimension_given(self): @@ -1132,7 +1153,7 @@ def test_2d_slice_dimension_given(self): # Replace the dimensions not iterated over with spanning slices. indices[2] = indices[3] = slice(None) expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) + assert next(res) == expected def test_2d_slice_reversed_dimension_given(self): # Confirm that reversing the order of the dimensions returns the same @@ -1143,10 +1164,10 @@ def test_2d_slice_reversed_dimension_given(self): # Replace the dimensions not iterated over with spanning slices. 
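
@_shared_utils.skip_data plays the role of the old @tests.skip_data class decorator; in pytest terms such a decorator is presumably built on a skip marker. A generic sketch of the underlying pytest.mark.skipif form, with a made-up environment switch rather than the iris sample-data configuration:

import os

import pytest

# Made-up switch standing in for however skip_data decides data availability.
skip_sample_data = pytest.mark.skipif(
    not os.environ.get("SAMPLE_DATA_DIR"),
    reason="sample test data not available",
)


@skip_sample_data
class TestNeedsSampleData:
    def test_placeholder(self):
        assert True
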
indices[2] = indices[3] = slice(None) expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) + assert next(res) == expected def test_2d_slice_nonexistent_dimension_given(self): - with self.assertRaisesRegex(ValueError, "iterator over a dimension"): + with pytest.raises(ValueError, match="iterator over a dimension"): _ = self.cube.slices_over([0, self.cube.ndim + 1]) def test_multidim_slice_coord_given(self): @@ -1160,24 +1181,24 @@ def test_multidim_slice_coord_given(self): # Replace the dimensions not iterated over with spanning slices. indices[0] = indices[1] = slice(None) expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) + assert next(res) == expected def test_duplicate_coordinate_given(self): res = self.cube.slices_over([1, 1]) for i, res_cube in zip(self.exp_iter_1d, res): expected = self.cube[:, i] - self.assertEqual(res_cube, expected) + assert res_cube == expected def test_non_orthogonal_coordinates_given(self): res = self.cube.slices_over(["model_level_number", "sigma"]) for i, res_cube in zip(self.exp_iter_1d, res): expected = self.cube[:, i] - self.assertEqual(res_cube, expected) + assert res_cube == expected def test_nodimension(self): # Slicing over no dimension should return the whole cube. res = self.cube.slices_over([]) - self.assertEqual(next(res), self.cube) + assert next(res) == self.cube def create_cube(lon_min, lon_max, bounds=False): @@ -1244,178 +1265,200 @@ def create_cube(lon_min, lon_max, bounds=False): # Ensure all the other coordinates and factories are correctly preserved. -class Test_intersection__Metadata(tests.IrisTest): - def test_metadata(self): +class Test_intersection__Metadata: + def test_metadata(self, request): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190)) - self.assertCMLApproxData(result) + _shared_utils.assert_CML_approx_data(request, result) - def test_metadata_wrapped(self): + def test_metadata_wrapped(self, request): cube = create_cube(-180, 180) result = cube.intersection(longitude=(170, 190)) - self.assertCMLApproxData(result) + _shared_utils.assert_CML_approx_data(request, result) # Explicitly check the handling of `circular` on the result. -class Test_intersection__Circular(tests.IrisTest): +class Test_intersection__Circular: def test_regional(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.coord("longitude").circular) + assert not result.coord("longitude").circular def test_regional_wrapped(self): cube = create_cube(-180, 180) result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.coord("longitude").circular) + assert not result.coord("longitude").circular def test_global(self): cube = create_cube(-180, 180) result = cube.intersection(longitude=(-180, 180)) - self.assertTrue(result.coord("longitude").circular) + assert result.coord("longitude").circular def test_global_wrapped(self): cube = create_cube(-180, 180) result = cube.intersection(longitude=(10, 370)) - self.assertTrue(result.coord("longitude").circular) + assert result.coord("longitude").circular # Check the various error conditions. 
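
The error-condition tests that follow rely on pytest.raises as a context manager, with match= doing a regular-expression search of the exception message. The same pattern in a toy, self-contained form:

import pytest


def _intersect(minimum, maximum):
    # Toy stand-in for the range validation exercised below.
    if minimum > maximum:
        raise ValueError(f"minimum {minimum} exceeds maximum {maximum}")
    return minimum, maximum


def test_reversed_min_max_rejected():
    with pytest.raises(ValueError, match="exceeds maximum"):
        _intersect(30, 10)
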
-class Test_intersection__Invalid(tests.IrisTest): +class Test_intersection__Invalid: def test_reversed_min_max(self): cube = create_cube(0, 360) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cube.intersection(longitude=(30, 10)) def test_dest_too_large(self): cube = create_cube(0, 360) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cube.intersection(longitude=(30, 500)) def test_src_too_large(self): cube = create_cube(0, 400) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cube.intersection(longitude=(10, 30)) def test_missing_coord(self): cube = create_cube(0, 360) - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): + with pytest.raises(iris.exceptions.CoordinateNotFoundError): cube.intersection(parrots=(10, 30)) def test_multi_dim_coord(self): cube = create_cube(0, 360) - with self.assertRaises(iris.exceptions.CoordinateMultiDimError): + with pytest.raises(iris.exceptions.CoordinateMultiDimError): cube.intersection(surface_altitude=(10, 30)) def test_null_region(self): # 10 <= v < 10 cube = create_cube(0, 360) - with self.assertRaises(IndexError): + with pytest.raises(IndexError): cube.intersection(longitude=(10, 10, False, False)) -class Test_intersection__Lazy(tests.IrisTest): +class Test_intersection__Lazy: def test_real_data(self): cube = create_cube(0, 360) cube.data result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) + assert not result.has_lazy_data() + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(170, 191) + ) + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 190 def test_real_data_wrapped(self): cube = create_cube(-180, 180) cube.data result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + assert not result.has_lazy_data() + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(170, 191) + ) + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_lazy_data(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190)) - self.assertTrue(result.has_lazy_data()) - self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) + assert result.has_lazy_data() + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(170, 191) + ) + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 190 def test_lazy_data_wrapped(self): cube = create_cube(-180, 180) result = cube.intersection(longitude=(170, 190)) - self.assertTrue(result.has_lazy_data()) - self.assertArrayEqual(result.coord("longitude").points, np.arange(170, 191)) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + assert result.has_lazy_data() + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(170, 191) + ) + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 -class Test_intersection_Points(tests.IrisTest): +class Test_intersection_Points: def 
test_ignore_bounds(self): cube = create_cube(0, 30, bounds=True) result = cube.intersection(longitude=(9.5, 12.5), ignore_bounds=True) - self.assertArrayEqual(result.coord("longitude").points, np.arange(10, 13)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [9.5, 10.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [11.5, 12.5]) + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(10, 13) + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [9.5, 10.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [11.5, 12.5] + ) # Check what happens with a regional, points-only circular intersection # coordinate. -class Test_intersection__RegionalSrcModulus(tests.IrisTest): +class Test_intersection__RegionalSrcModulus: def test_request_subset(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(45, 50)) - self.assertArrayEqual(result.coord("longitude").points, np.arange(45, 51)) - self.assertArrayEqual(result.data[0, 0], np.arange(5, 11)) + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(45, 51) + ) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(5, 11)) def test_request_left(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(35, 45)) - self.assertArrayEqual(result.coord("longitude").points, np.arange(40, 46)) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 6)) + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(40, 46) + ) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(0, 6)) def test_request_right(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(55, 65)) - self.assertArrayEqual(result.coord("longitude").points, np.arange(55, 60)) - self.assertArrayEqual(result.data[0, 0], np.arange(15, 20)) + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(55, 60) + ) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(15, 20)) def test_request_superset(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(35, 65)) - self.assertArrayEqual(result.coord("longitude").points, np.arange(40, 60)) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 20)) + _shared_utils.assert_array_equal( + result.coord("longitude").points, np.arange(40, 60) + ) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(0, 20)) def test_request_subset_modulus(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(45 + 360, 50 + 360)) - self.assertArrayEqual( + _shared_utils.assert_array_equal( result.coord("longitude").points, np.arange(45 + 360, 51 + 360) ) - self.assertArrayEqual(result.data[0, 0], np.arange(5, 11)) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(5, 11)) def test_request_left_modulus(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(35 + 360, 45 + 360)) - self.assertArrayEqual( + _shared_utils.assert_array_equal( result.coord("longitude").points, np.arange(40 + 360, 46 + 360) ) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 6)) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(0, 6)) def test_request_right_modulus(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(55 + 360, 65 + 360)) - self.assertArrayEqual( + _shared_utils.assert_array_equal( result.coord("longitude").points, np.arange(55 + 360, 60 + 360) ) - self.assertArrayEqual(result.data[0, 0], np.arange(15, 20)) + 
_shared_utils.assert_array_equal(result.data[0, 0], np.arange(15, 20)) def test_request_superset_modulus(self): cube = create_cube(40, 60) result = cube.intersection(longitude=(35 + 360, 65 + 360)) - self.assertArrayEqual( + _shared_utils.assert_array_equal( result.coord("longitude").points, np.arange(40 + 360, 60 + 360) ) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 20)) + _shared_utils.assert_array_equal(result.data[0, 0], np.arange(0, 20)) def test_tolerance_f4(self): cube = create_cube(0, 5) @@ -1423,10 +1466,10 @@ def test_tolerance_f4(self): [0.0, 3.74999905, 7.49999809, 11.24999714, 14.99999619], dtype="f4" ) result = cube.intersection(longitude=(0, 5)) - self.assertArrayAlmostEqual( + _shared_utils.assert_array_almost_equal( result.coord("longitude").points, np.array([0.0, 3.74999905]) ) - self.assertArrayEqual(result.data[0, 0], np.array([0, 1])) + _shared_utils.assert_array_equal(result.data[0, 0], np.array([0, 1])) def test_tolerance_f8(self): cube = create_cube(0, 5) @@ -1434,15 +1477,15 @@ def test_tolerance_f8(self): [0.0, 3.74999905, 7.49999809, 11.24999714, 14.99999619], dtype="f8" ) result = cube.intersection(longitude=(0, 5)) - self.assertArrayAlmostEqual( + _shared_utils.assert_array_almost_equal( result.coord("longitude").points, np.array([0.0, 3.74999905]) ) - self.assertArrayEqual(result.data[0, 0], np.array([0, 1])) + _shared_utils.assert_array_equal(result.data[0, 0], np.array([0, 1])) # Check what happens with a global, points-only circular intersection # coordinate. -class Test_intersection__GlobalSrcModulus(tests.IrisTest): +class Test_intersection__GlobalSrcModulus: def test_global_wrapped_extreme_increasing_base_period(self): # Ensure that we can correctly handle points defined at (base + period) cube = create_cube(-180.0, 180.0) @@ -1450,7 +1493,7 @@ def test_global_wrapped_extreme_increasing_base_period(self): # Redefine longitude so that points at (base + period) lons.points = np.linspace(-180.0, 180, lons.points.size) result = cube.intersection(longitude=(lons.points.min(), lons.points.max())) - self.assertArrayEqual(result.data, cube.data) + _shared_utils.assert_array_equal(result.data, cube.data) def test_global_wrapped_extreme_decreasing_base_period(self): # Ensure that we can correctly handle points defined at (base + period) @@ -1459,41 +1502,41 @@ def test_global_wrapped_extreme_decreasing_base_period(self): # Redefine longitude so that points at (base + period) lons.points = np.linspace(180.0, -180.0, lons.points.size) result = cube.intersection(longitude=(lons.points.min(), lons.points.max())) - self.assertArrayEqual(result.data, cube.data) + _shared_utils.assert_array_equal(result.data, cube.data) def test_global(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(0, 360)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], 359) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 359) + assert result.coord("longitude").points[0] == 0 + assert result.coord("longitude").points[-1] == 359 + assert result.data[0, 0, 0] == 0 + assert result.data[0, 0, -1] == 359 def test_global_wrapped(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(-180, 180)) - self.assertEqual(result.coord("longitude").points[0], -180) - self.assertEqual(result.coord("longitude").points[-1], 179) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) + assert result.coord("longitude").points[0] 
== -180 + assert result.coord("longitude").points[-1] == 179 + assert result.data[0, 0, 0] == 180 + assert result.data[0, 0, -1] == 179 def test_aux_coord(self): cube = create_cube(0, 360) cube.replace_coord(iris.coords.AuxCoord.from_coord(cube.coord("longitude"))) result = cube.intersection(longitude=(0, 360)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], 359) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 359) + assert result.coord("longitude").points[0] == 0 + assert result.coord("longitude").points[-1] == 359 + assert result.data[0, 0, 0] == 0 + assert result.data[0, 0, -1] == 359 def test_aux_coord_wrapped(self): cube = create_cube(0, 360) cube.replace_coord(iris.coords.AuxCoord.from_coord(cube.coord("longitude"))) result = cube.intersection(longitude=(-180, 180)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], -1) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 359) + assert result.coord("longitude").points[0] == 0 + assert result.coord("longitude").points[-1] == -1 + assert result.data[0, 0, 0] == 0 + assert result.data[0, 0, -1] == 359 def test_aux_coord_non_contiguous_wrapped(self): cube = create_cube(0, 360) @@ -1501,103 +1544,103 @@ def test_aux_coord_non_contiguous_wrapped(self): coord.points = (coord.points * 1.5) % 360 cube.replace_coord(coord) result = cube.intersection(longitude=(-90, 90)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], 90) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 300) + assert result.coord("longitude").points[0] == 0 + assert result.coord("longitude").points[-1] == 90 + assert result.data[0, 0, 0] == 0 + assert result.data[0, 0, -1] == 300 def test_decrementing(self): cube = create_cube(360, 0) result = cube.intersection(longitude=(40, 60)) - self.assertEqual(result.coord("longitude").points[0], 60) - self.assertEqual(result.coord("longitude").points[-1], 40) - self.assertEqual(result.data[0, 0, 0], 300) - self.assertEqual(result.data[0, 0, -1], 320) + assert result.coord("longitude").points[0] == 60 + assert result.coord("longitude").points[-1] == 40 + assert result.data[0, 0, 0] == 300 + assert result.data[0, 0, -1] == 320 def test_decrementing_wrapped(self): cube = create_cube(360, 0) result = cube.intersection(longitude=(-10, 10)) - self.assertEqual(result.coord("longitude").points[0], 10) - self.assertEqual(result.coord("longitude").points[-1], -10) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + assert result.coord("longitude").points[0] == 10 + assert result.coord("longitude").points[-1] == -10 + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_no_wrap_after_modulus(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170 + 360, 190 + 360)) - self.assertEqual(result.coord("longitude").points[0], 170 + 360) - self.assertEqual(result.coord("longitude").points[-1], 190 + 360) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) + assert result.coord("longitude").points[0] == 170 + 360 + assert result.coord("longitude").points[-1] == 190 + 360 + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 190 def test_wrap_after_modulus(self): cube = create_cube(-180, 180) result = 
cube.intersection(longitude=(170 + 360, 190 + 360)) - self.assertEqual(result.coord("longitude").points[0], 170 + 360) - self.assertEqual(result.coord("longitude").points[-1], 190 + 360) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + assert result.coord("longitude").points[0] == 170 + 360 + assert result.coord("longitude").points[-1] == 190 + 360 + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_select_by_coord(self): cube = create_cube(0, 360) coord = iris.coords.DimCoord(0, "longitude", units="degrees") result = cube.intersection(iris.coords.CoordExtent(coord, 10, 30)) - self.assertEqual(result.coord("longitude").points[0], 10) - self.assertEqual(result.coord("longitude").points[-1], 30) - self.assertEqual(result.data[0, 0, 0], 10) - self.assertEqual(result.data[0, 0, -1], 30) + assert result.coord("longitude").points[0] == 10 + assert result.coord("longitude").points[-1] == 30 + assert result.data[0, 0, 0] == 10 + assert result.data[0, 0, -1] == 30 def test_inclusive_exclusive(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190, True, False)) - self.assertEqual(result.coord("longitude").points[0], 170) - self.assertEqual(result.coord("longitude").points[-1], 189) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 189) + assert result.coord("longitude").points[0] == 170 + assert result.coord("longitude").points[-1] == 189 + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 189 def test_exclusive_inclusive(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190, False)) - self.assertEqual(result.coord("longitude").points[0], 171) - self.assertEqual(result.coord("longitude").points[-1], 190) - self.assertEqual(result.data[0, 0, 0], 171) - self.assertEqual(result.data[0, 0, -1], 190) + assert result.coord("longitude").points[0] == 171 + assert result.coord("longitude").points[-1] == 190 + assert result.data[0, 0, 0] == 171 + assert result.data[0, 0, -1] == 190 def test_exclusive_exclusive(self): cube = create_cube(0, 360) result = cube.intersection(longitude=(170, 190, False, False)) - self.assertEqual(result.coord("longitude").points[0], 171) - self.assertEqual(result.coord("longitude").points[-1], 189) - self.assertEqual(result.data[0, 0, 0], 171) - self.assertEqual(result.data[0, 0, -1], 189) + assert result.coord("longitude").points[0] == 171 + assert result.coord("longitude").points[-1] == 189 + assert result.data[0, 0, 0] == 171 + assert result.data[0, 0, -1] == 189 def test_single_point(self): # 10 <= v <= 10 cube = create_cube(0, 360) result = cube.intersection(longitude=(10, 10)) - self.assertEqual(result.coord("longitude").points[0], 10) - self.assertEqual(result.coord("longitude").points[-1], 10) - self.assertEqual(result.data[0, 0, 0], 10) - self.assertEqual(result.data[0, 0, -1], 10) + assert result.coord("longitude").points[0] == 10 + assert result.coord("longitude").points[-1] == 10 + assert result.data[0, 0, 0] == 10 + assert result.data[0, 0, -1] == 10 def test_two_points(self): # -1.5 <= v <= 0.5 cube = create_cube(0, 360) result = cube.intersection(longitude=(-1.5, 0.5)) - self.assertEqual(result.coord("longitude").points[0], -1) - self.assertEqual(result.coord("longitude").points[-1], 0) - self.assertEqual(result.data[0, 0, 0], 359) - self.assertEqual(result.data[0, 0, -1], 0) + assert result.coord("longitude").points[0] == -1 + assert result.coord("longitude").points[-1] == 0 + 
assert result.data[0, 0, 0] == 359 + assert result.data[0, 0, -1] == 0 def test_wrap_radians(self): cube = create_cube(0, 360) cube.coord("longitude").convert_units("radians") result = cube.intersection(longitude=(-1, 0.5)) - self.assertArrayAllClose( + _shared_utils.assert_array_all_close( result.coord("longitude").points, np.arange(-57, 29) * np.pi / 180 ) - self.assertEqual(result.data[0, 0, 0], 303) - self.assertEqual(result.data[0, 0, -1], 28) + assert result.data[0, 0, 0] == 303 + assert result.data[0, 0, -1] == 28 def test_tolerance_bug(self): # Floating point changes introduced by wrapping mean @@ -1606,7 +1649,7 @@ def test_tolerance_bug(self): cube = create_cube(0, 400) cube.coord("longitude").points = np.linspace(-179.55, 179.55, 400) result = cube.intersection(longitude=(125, 145)) - self.assertArrayAlmostEqual( + _shared_utils.assert_array_almost_equal( result.coord("longitude").points, cube.coord("longitude").points[339:361], ) @@ -1620,138 +1663,194 @@ def test_tolerance_bug_wrapped(self): cube.coord("longitude").points[389:] - 360.0, cube.coord("longitude").points[:11], ) - self.assertArrayAlmostEqual(result.coord("longitude").points, expected) + _shared_utils.assert_array_almost_equal( + result.coord("longitude").points, expected + ) # Check what happens with a global, points-and-bounds circular # intersection coordinate. -class Test_intersection__ModulusBounds(tests.IrisTest): +class Test_intersection__ModulusBounds: def test_global_wrapped_extreme_increasing_base_period(self): # Ensure that we can correctly handle bounds defined at (base + period) cube = create_cube(-180.0, 180.0, bounds=True) lons = cube.coord("longitude") result = cube.intersection(longitude=(lons.bounds.min(), lons.bounds.max())) - self.assertArrayEqual(result.data, cube.data) + _shared_utils.assert_array_equal(result.data, cube.data) def test_global_wrapped_extreme_decreasing_base_period(self): # Ensure that we can correctly handle bounds defined at (base + period) cube = create_cube(180.0, -180.0, bounds=True) lons = cube.coord("longitude") result = cube.intersection(longitude=(lons.bounds.min(), lons.bounds.max())) - self.assertArrayEqual(result.data, cube.data) + _shared_utils.assert_array_equal(result.data, cube.data) def test_misaligned_points_inside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(169.75, 190.25)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [169.5, 170.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [189.5, 190.5]) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [169.5, 170.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [189.5, 190.5] + ) + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 190 def test_misaligned_points_outside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(170.25, 189.75)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [169.5, 170.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [189.5, 190.5]) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [169.5, 170.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [189.5, 190.5] + ) + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 190 def 
test_misaligned_bounds(self): cube = create_cube(-180, 180, bounds=True) result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-0.5, 0.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [358.5, 359.5]) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [-0.5, 0.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [358.5, 359.5] + ) + assert result.data[0, 0, 0] == 180 + assert result.data[0, 0, -1] == 179 def test_misaligned_bounds_decreasing(self): cube = create_cube(180, -180, bounds=True) result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [359.5, 358.5]) - self.assertArrayEqual(result.coord("longitude").points[-1], 0) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [0.5, -0.5]) - self.assertEqual(result.data[0, 0, 0], 181) - self.assertEqual(result.data[0, 0, -1], 180) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [359.5, 358.5] + ) + _shared_utils.assert_array_equal(result.coord("longitude").points[-1], 0) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [0.5, -0.5] + ) + assert result.data[0, 0, 0] == 181 + assert result.data[0, 0, -1] == 180 def test_aligned_inclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(170.5, 189.5)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [169.5, 170.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [189.5, 190.5]) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [169.5, 170.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [189.5, 190.5] + ) + assert result.data[0, 0, 0] == 170 + assert result.data[0, 0, -1] == 190 def test_aligned_exclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(170.5, 189.5, False, False)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [170.5, 171.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [188.5, 189.5]) - self.assertEqual(result.data[0, 0, 0], 171) - self.assertEqual(result.data[0, 0, -1], 189) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [170.5, 171.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [188.5, 189.5] + ) + assert result.data[0, 0, 0] == 171 + assert result.data[0, 0, -1] == 189 def test_aligned_bounds_at_modulus(self): cube = create_cube(-179.5, 180.5, bounds=True) result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0, 1]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [359, 360]) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) + _shared_utils.assert_array_equal(result.coord("longitude").bounds[0], [0, 1]) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [359, 360] + ) + assert result.data[0, 0, 0] == 180 + assert result.data[0, 0, -1] == 179 def test_negative_aligned_bounds_at_modulus(self): cube = create_cube(0.5, 360.5, bounds=True) result = cube.intersection(longitude=(-180, 180)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-180, -179]) - 
self.assertArrayEqual(result.coord("longitude").bounds[-1], [179, 180]) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [-180, -179] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [179, 180] + ) + assert result.data[0, 0, 0] == 180 + assert result.data[0, 0, -1] == 179 def test_negative_misaligned_points_inside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-10.25, 10.25)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-10.5, -9.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [9.5, 10.5]) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [-10.5, -9.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [9.5, 10.5] + ) + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_negative_misaligned_points_outside(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-9.75, 9.75)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-10.5, -9.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [9.5, 10.5]) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [-10.5, -9.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [9.5, 10.5] + ) + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_negative_aligned_inclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-10.5, 10.5)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-11.5, -10.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [10.5, 11.5]) - self.assertEqual(result.data[0, 0, 0], 349) - self.assertEqual(result.data[0, 0, -1], 11) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [-11.5, -10.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [10.5, 11.5] + ) + assert result.data[0, 0, 0] == 349 + assert result.data[0, 0, -1] == 11 def test_negative_aligned_exclusive(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(-10.5, 10.5, False, False)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-10.5, -9.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [9.5, 10.5]) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [-10.5, -9.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [9.5, 10.5] + ) + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_decrementing(self): cube = create_cube(360, 0, bounds=True) result = cube.intersection(longitude=(40, 60)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [60.5, 59.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [40.5, 39.5]) - self.assertEqual(result.data[0, 0, 0], 300) - self.assertEqual(result.data[0, 0, -1], 320) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [60.5, 59.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [40.5, 39.5] + ) + 
assert result.data[0, 0, 0] == 300 + assert result.data[0, 0, -1] == 320 def test_decrementing_wrapped(self): cube = create_cube(360, 0, bounds=True) result = cube.intersection(longitude=(-10, 10)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [10.5, 9.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [-9.5, -10.5]) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [10.5, 9.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [-9.5, -10.5] + ) + assert result.data[0, 0, 0] == 350 + assert result.data[0, 0, -1] == 10 def test_numerical_tolerance(self): # test the tolerance on the coordinate value is not causing a @@ -1759,13 +1858,13 @@ def test_numerical_tolerance(self): cube = create_cube(28.5, 68.5, bounds=True) result = cube.intersection(longitude=(27.74, 68.61)) result_lons = result.coord("longitude") - self.assertAlmostEqual(result_lons.points[0], 28.5) - self.assertAlmostEqual(result_lons.points[-1], 67.5) + _shared_utils.assert_array_almost_equal(result_lons.points[0], 28.5) + _shared_utils.assert_array_almost_equal(result_lons.points[-1], 67.5) dtype = result_lons.dtype - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( result_lons.bounds[0], np.array([28.0, 29.0], dtype=dtype) ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( result_lons.bounds[-1], np.array([67.0, 68.0], dtype=dtype) ) @@ -1778,13 +1877,13 @@ def test_numerical_tolerance_wrapped(self): lons.bounds = lons.bounds / 10 result = cube.intersection(longitude=(-60, 60)) result_lons = result.coord("longitude") - self.assertAlmostEqual(result_lons.points[0], -60.05) - self.assertAlmostEqual(result_lons.points[-1], 60.05) + _shared_utils.assert_array_almost_equal(result_lons.points[0], -60.05) + _shared_utils.assert_array_almost_equal(result_lons.points[-1], 60.05) dtype = result_lons.dtype - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( result_lons.bounds[0], np.array([-60.1, -60.0], dtype=dtype) ) - np.testing.assert_array_almost_equal( + _shared_utils.assert_array_almost_equal( result_lons.bounds[-1], np.array([60.0, 60.1], dtype=dtype) ) @@ -1793,55 +1892,79 @@ def test_ignore_bounds_wrapped(self): cube = create_cube(0, 360, bounds=True) result = cube.intersection(longitude=(10.25, 370.25), ignore_bounds=True) # Expect points 11..370 not bounds [9.5, 10.5] .. 
[368.5, 369.5] - self.assertArrayEqual(result.coord("longitude").bounds[0], [10.5, 11.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [369.5, 370.5]) - self.assertEqual(result.data[0, 0, 0], 11) - self.assertEqual(result.data[0, 0, -1], 10) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [10.5, 11.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [369.5, 370.5] + ) + assert result.data[0, 0, 0] == 11 + assert result.data[0, 0, -1] == 10 def test_within_cell(self): # Test cell is included when it entirely contains the requested range cube = create_cube(0, 10, bounds=True) result = cube.intersection(longitude=(0.7, 0.8)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [0.5, 1.5]) - self.assertEqual(result.data[0, 0, 0], 1) - self.assertEqual(result.data[0, 0, -1], 1) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [0.5, 1.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [0.5, 1.5] + ) + assert result.data[0, 0, 0] == 1 + assert result.data[0, 0, -1] == 1 def test_threshold_half(self): cube = create_cube(0, 10, bounds=True) result = cube.intersection(longitude=(1, 6.999), threshold=0.5) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [5.5, 6.5]) - self.assertEqual(result.data[0, 0, 0], 1) - self.assertEqual(result.data[0, 0, -1], 6) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [0.5, 1.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [5.5, 6.5] + ) + assert result.data[0, 0, 0] == 1 + assert result.data[0, 0, -1] == 6 def test_threshold_full(self): cube = create_cube(0, 10, bounds=True) result = cube.intersection(longitude=(0.5, 7.499), threshold=1) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [5.5, 6.5]) - self.assertEqual(result.data[0, 0, 0], 1) - self.assertEqual(result.data[0, 0, -1], 6) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [0.5, 1.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [5.5, 6.5] + ) + assert result.data[0, 0, 0] == 1 + assert result.data[0, 0, -1] == 6 def test_threshold_wrapped(self): # Test that a cell is wrapped to `maximum` if required to exceed # the threshold cube = create_cube(-180, 180, bounds=True) result = cube.intersection(longitude=(0.4, 360.4), threshold=0.2) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [359.5, 360.5]) - self.assertEqual(result.data[0, 0, 0], 181) - self.assertEqual(result.data[0, 0, -1], 180) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [0.5, 1.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [359.5, 360.5] + ) + assert result.data[0, 0, 0] == 181 + assert result.data[0, 0, -1] == 180 def test_threshold_wrapped_gap(self): # Test that a cell is wrapped to `maximum` if required to exceed # the threshold (even with a gap in the range) cube = create_cube(-180, 180, bounds=True) result = cube.intersection(longitude=(0.4, 360.35), threshold=0.2) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - 
self.assertArrayEqual(result.coord("longitude").bounds[-1], [359.5, 360.5]) - self.assertEqual(result.data[0, 0, 0], 181) - self.assertEqual(result.data[0, 0, -1], 180) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[0], [0.5, 1.5] + ) + _shared_utils.assert_array_equal( + result.coord("longitude").bounds[-1], [359.5, 360.5] + ) + assert result.data[0, 0, 0] == 181 + assert result.data[0, 0, -1] == 180 def unrolled_cube(): @@ -1859,29 +1982,34 @@ def unrolled_cube(): # Check what happens with a "unrolled" scatter-point data with a circular # intersection coordinate. -class Test_intersection__ScatterModulus(tests.IrisTest): +class Test_intersection__ScatterModulus: def test_subset(self): cube = unrolled_cube() result = cube.intersection(longitude=(5, 8)) - self.assertArrayEqual(result.coord("longitude").points, [5, 8, 5]) - self.assertArrayEqual(result.data, [0, 2, 3]) + _shared_utils.assert_array_equal(result.coord("longitude").points, [5, 8, 5]) + _shared_utils.assert_array_equal(result.data, [0, 2, 3]) def test_subset_wrapped(self): cube = unrolled_cube() result = cube.intersection(longitude=(5 + 360, 8 + 360)) - self.assertArrayEqual(result.coord("longitude").points, [365, 368, 365]) - self.assertArrayEqual(result.data, [0, 2, 3]) + _shared_utils.assert_array_equal( + result.coord("longitude").points, [365, 368, 365] + ) + _shared_utils.assert_array_equal(result.data, [0, 2, 3]) def test_superset(self): cube = unrolled_cube() result = cube.intersection(longitude=(0, 15)) - self.assertArrayEqual(result.coord("longitude").points, [5, 10, 8, 5, 3]) - self.assertArrayEqual(result.data, np.arange(5)) + _shared_utils.assert_array_equal( + result.coord("longitude").points, [5, 10, 8, 5, 3] + ) + _shared_utils.assert_array_equal(result.data, np.arange(5)) # Test the API of the cube interpolation method. -class Test_interpolate(tests.IrisTest): - def setUp(self): +class Test_interpolate: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.simple_2d() self.scheme = mock.Mock(name="interpolation scheme") @@ -1897,10 +2025,10 @@ def test_api(self): self.interpolator.assert_called_once_with( (0.5, 0.6), collapse_scalar=self.collapse_coord ) - self.assertIs(result, mock.sentinel.RESULT) + assert result is mock.sentinel.RESULT -class Test_regrid(tests.IrisTest): +class Test_regrid: def test(self): # Test that Cube.regrid() just defers to the regridder of the # given scheme. @@ -1922,21 +2050,21 @@ def regridder(self, src, target): cube = Cube(0) scheme = FakeScheme() result = cube.regrid(mock.sentinel.TARGET, scheme) - self.assertEqual(result, (scheme, cube, mock.sentinel.TARGET, cube)) + assert result == (scheme, cube, mock.sentinel.TARGET, cube) -class Test_copy(tests.IrisTest): +class Test_copy: def _check_copy(self, cube, cube_copy): - self.assertIsNot(cube_copy, cube) - self.assertEqual(cube_copy, cube) - self.assertIsNot(cube_copy.core_data(), cube.core_data()) + assert cube_copy is not cube + assert cube_copy == cube + assert cube_copy.core_data() is not cube.core_data() if ma.isMaskedArray(cube.data): - self.assertMaskedArrayEqual(cube_copy.data, cube.data) + _shared_utils.assert_masked_array_equal(cube_copy.data, cube.data) if cube.data.mask is not ma.nomask: # "No mask" is a constant : all other cases must be distinct. 
- self.assertIsNot(cube_copy.core_data().mask, cube.core_data().mask) + assert cube_copy.core_data().mask is not cube.core_data().mask else: - self.assertArrayEqual(cube_copy.data, cube.data) + _shared_utils.assert_array_equal(cube_copy.data, cube.data) def test(self): cube = stock.simple_3d() @@ -2003,7 +2131,7 @@ def _add_test_meshcube(self, nomesh=False, n_z=2, **meshcoord_kwargs): self.cube = cube -class Test_coords__mesh_coords(tests.IrisTest): +class Test_coords__mesh_coords: """Checking *only* the new "mesh_coords" keyword of the coord/coords methods. This is *not* attached to the existing tests for this area, as they are @@ -2011,7 +2139,8 @@ class Test_coords__mesh_coords(tests.IrisTest): """ - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): # Create a standard test cube with a variety of types of coord. _add_test_meshcube(self) @@ -2028,7 +2157,7 @@ def sortkey(item): items_a = sorted(items_a, key=sortkey) items_b = sorted(items_b, key=sortkey) - self.assertEqual(items_a, items_b) + assert items_a == items_b def test_coords__all__meshcoords_default(self): # coords() includes mesh-coords along with the others. @@ -2051,12 +2180,12 @@ def test_coords__all__meshcoords_omitted(self): def test_coords__axis__meshcoords(self): # Coord (singular) with axis + mesh_coords=True result = self.cube.coord(axis="x", mesh_coords=True) - self.assertIs(result, self.meshco_x) + assert result is self.meshco_x def test_coords__dimcoords__meshcoords(self): # dim_coords and mesh_coords should be mutually exclusive. result = self.cube.coords(dim_coords=True, mesh_coords=True) - self.assertEqual(result, []) + assert result == [] def test_coords__nodimcoords__meshcoords(self): # When mesh_coords=True, dim_coords=False should have no effect. @@ -2065,24 +2194,26 @@ def test_coords__nodimcoords__meshcoords(self): self._assert_lists_equal(expected, result) -class Test_mesh(tests.IrisTest): - def setUp(self): +class Test_mesh: + @pytest.fixture(autouse=True) + def _setup(self): # Create a standard test cube with a variety of types of coord. _add_test_meshcube(self) def test_mesh(self): result = self.cube.mesh - self.assertIs(result, self.mesh) + assert result is self.mesh def test_no_mesh(self): # Replace standard setUp cube with a no-mesh version. _add_test_meshcube(self, nomesh=True) result = self.cube.mesh - self.assertIsNone(result) + assert result is None -class Test_location(tests.IrisTest): - def setUp(self): +class Test_location: + @pytest.fixture(autouse=True) + def _setup(self): # Create a standard test cube with a variety of types of coord. _add_test_meshcube(self) @@ -2090,23 +2221,24 @@ def test_no_mesh(self): # Replace standard setUp cube with a no-mesh version. _add_test_meshcube(self, nomesh=True) result = self.cube.location - self.assertIsNone(result) + assert result is None def test_mesh(self): cube = self.cube result = cube.location - self.assertEqual(result, self.meshco_x.location) + assert result == self.meshco_x.location def test_alternate_location(self): # Replace standard setUp cube with an edge-based version. _add_test_meshcube(self, location="edge") cube = self.cube result = cube.location - self.assertEqual(result, "edge") + assert result == "edge" -class Test_mesh_dim(tests.IrisTest): - def setUp(self): +class Test_mesh_dim: + @pytest.fixture(autouse=True) + def _setup(self): # Create a standard test cube with a variety of types of coord. 
_add_test_meshcube(self) @@ -2114,12 +2246,12 @@ def test_no_mesh(self): # Replace standard setUp cube with a no-mesh version. _add_test_meshcube(self, nomesh=True) result = self.cube.mesh_dim() - self.assertIsNone(result) + assert result is None def test_mesh(self): cube = self.cube result = cube.mesh_dim() - self.assertEqual(result, 1) + assert result == 1 def test_alternate(self): # Replace standard setUp cube with an edge-based version. @@ -2128,16 +2260,17 @@ def test_alternate(self): # Transpose the cube : the mesh dim is then 0 cube.transpose() result = cube.mesh_dim() - self.assertEqual(result, 0) + assert result == 0 -class Test__init__mesh(tests.IrisTest): +class Test__init__mesh: """Test that creation with mesh-coords functions, and prevents a cube having incompatible mesh-coords. """ - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): # Create a standard test mesh and other useful components. mesh = sample_mesh() meshco = sample_meshcoord(mesh=mesh) @@ -2157,7 +2290,7 @@ def test_mesh(self): dim_coords_and_dims=[(dimco_z, 0), (dimco_mesh, 1)], aux_coords_and_dims=[(meshco, 1)], ) - self.assertEqual(cube.mesh, meshco.mesh) + assert cube.mesh == meshco.mesh def test_fail_dim_meshcoord(self): # As "test_mesh", but attempt to use the meshcoord as a dim-coord. @@ -2165,7 +2298,7 @@ def test_fail_dim_meshcoord(self): nz, n_faces = self.nz, self.n_faces dimco_z = DimCoord(np.arange(nz), long_name="z") meshco = self.meshco - with self.assertRaisesRegex(ValueError, "may not be an AuxCoord"): + with pytest.raises(ValueError, match="may not be an AuxCoord"): Cube( np.zeros((nz, n_faces)), dim_coords_and_dims=[(dimco_z, 0), (meshco, 1)], @@ -2179,7 +2312,7 @@ def test_multi_meshcoords(self): np.zeros(n_faces), aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], ) - self.assertEqual(cube.mesh, meshco_x.mesh) + assert cube.mesh == meshco_x.mesh def test_multi_meshcoords_same_axis(self): # *Not* an error, as long as the coords are distinguishable. @@ -2194,7 +2327,7 @@ def test_multi_meshcoords_same_axis(self): np.zeros(n_faces), aux_coords_and_dims=[(meshco_1, 0), (meshco_2, 0)], ) - self.assertEqual(cube.mesh, meshco_1.mesh) + assert cube.mesh == meshco_1.mesh def test_fail_meshcoords_different_locations(self): # Same as successful 'multi_mesh', but different locations. 
@@ -2203,10 +2336,10 @@ def test_fail_meshcoords_different_locations(self): meshco_1 = sample_meshcoord(axis="x", mesh=mesh, location="face") meshco_2 = sample_meshcoord(axis="y", mesh=mesh, location="edge") # They should still have the same *shape* (or would fail anyway) - self.assertEqual(meshco_1.shape, meshco_2.shape) + assert meshco_1.shape == meshco_2.shape n_faces = meshco_1.shape[0] msg = "does not match existing cube location" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): Cube( np.zeros(n_faces), aux_coords_and_dims=[(meshco_1, 0), (meshco_2, 0)], @@ -2226,7 +2359,7 @@ def test_fail_meshcoords_different_meshes(self): meshco_y = sample_meshcoord(axis="y") # Own (different) mesh meshco_y.mesh.long_name = "new_name" n_faces = meshco_x.shape[0] - with self.assertRaisesRegex(ValueError, "MeshXY.* does not match"): + with pytest.raises(ValueError, match="MeshXY.* does not match"): Cube( np.zeros(n_faces), aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], @@ -2240,20 +2373,21 @@ def test_fail_meshcoords_different_dims(self): meshco_x = sample_meshcoord(mesh=mesh, axis="x") meshco_y = sample_meshcoord(mesh=mesh, axis="y") msg = "does not match existing cube mesh dimension" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): Cube( np.zeros((n_z, n_faces)), aux_coords_and_dims=[(meshco_x, 1), (meshco_y, 0)], ) -class Test__add_aux_coord__mesh(tests.IrisTest): +class Test__add_aux_coord__mesh: """Test that "Cube.add_aux_coord" functions with a mesh-coord, and prevents a cube having incompatible mesh-coords. """ - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): _add_test_meshcube(self) # Remove the existing "meshco_y", so we can add similar ones without # needing to distinguish from the existing. @@ -2264,7 +2398,7 @@ def test_add_compatible(self): meshco_y = self.meshco_y # Add the y-meshco back into the cube. cube.add_aux_coord(meshco_y, 1) - self.assertIn(meshco_y, cube.coords(mesh_coords=True)) + assert meshco_y in cube.coords(mesh_coords=True) def test_add_multiple(self): # Show that we can add extra mesh coords. @@ -2276,7 +2410,7 @@ def test_add_multiple(self): new_meshco_y = meshco_y.copy() new_meshco_y.rename("alternative") cube.add_aux_coord(new_meshco_y, 1) - self.assertEqual(len(cube.coords(mesh_coords=True)), 3) + assert len(cube.coords(mesh_coords=True)) == 3 def test_add_equal_mesh(self): # Make a duplicate y-meshco, and rename so it can add into the cube. @@ -2284,7 +2418,7 @@ def test_add_equal_mesh(self): # Create 'meshco_y' duplicate, but a new mesh meshco_y = sample_meshcoord(axis="y") cube.add_aux_coord(meshco_y, 1) - self.assertIn(meshco_y, cube.coords(mesh_coords=True)) + assert meshco_y in cube.coords(mesh_coords=True) def test_fail_different_mesh(self): # Make a duplicate y-meshco, and rename so it can add into the cube. @@ -2293,7 +2427,7 @@ def test_fail_different_mesh(self): meshco_y = sample_meshcoord(axis="y") meshco_y.mesh.long_name = "new_name" msg = "does not match existing cube mesh" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): cube.add_aux_coord(meshco_y, 1) def test_fail_different_location(self): @@ -2306,7 +2440,7 @@ def test_fail_different_location(self): # Create a new meshco_y, same mesh but based on edges. 
meshco_y = sample_meshcoord(axis="y", mesh=self.mesh, location="edge") msg = "does not match existing cube location" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): cube.add_aux_coord(meshco_y, 1) def test_fail_different_dimension(self): @@ -2319,11 +2453,11 @@ def test_fail_different_dimension(self): # Attempt to re-attach the 'y' meshcoord, to a different cube dimension. msg = "does not match existing cube mesh dimension" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): cube.add_aux_coord(meshco_y, 0) -class Test__add_dim_coord__mesh(tests.IrisTest): +class Test__add_dim_coord__mesh: """Test that "Cube.add_dim_coord" cannot work with a mesh-coord.""" def test(self): @@ -2331,11 +2465,11 @@ def test(self): mesh = sample_mesh(n_faces=2) meshco = sample_meshcoord(mesh=mesh) cube = Cube([0, 1]) - with self.assertRaisesRegex(ValueError, "may not be an AuxCoord"): + with pytest.raises(ValueError, match="may not be an AuxCoord"): cube.add_dim_coord(meshco, 0) -class Test__eq__mesh(tests.IrisTest): +class Test__eq__mesh: """Check that cubes with meshes support == as expected. Note: there is no special code for this in iris.cube.Cube : it is @@ -2343,21 +2477,22 @@ class Test__eq__mesh(tests.IrisTest): """ - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): # Create a 'standard' test cube. _add_test_meshcube(self) def test_copied_cube_match(self): cube = self.cube cube2 = cube.copy() - self.assertEqual(cube, cube2) + assert cube == cube2 def test_equal_mesh_match(self): cube1 = self.cube # re-create an identical cube, using the same mesh. _add_test_meshcube(self) cube2 = self.cube - self.assertEqual(cube1, cube2) + assert cube1 == cube2 def test_new_mesh_different(self): cube1 = self.cube @@ -2365,11 +2500,12 @@ def test_new_mesh_different(self): _add_test_meshcube(self) self.cube.mesh.long_name = "new_name" cube2 = self.cube - self.assertNotEqual(cube1, cube2) + assert cube1 != cube2 -class Test_dtype(tests.IrisTest): - def setUp(self): +class Test_dtype: + @pytest.fixture(autouse=True) + def _setup(self): self.dtypes = ( np.dtype("int"), np.dtype("uint"), @@ -2381,56 +2517,56 @@ def test_real_data(self): for dtype in self.dtypes: data = np.array([0, 1], dtype=dtype) cube = Cube(data) - self.assertEqual(cube.dtype, dtype) + assert cube.dtype == dtype def test_real_data_masked__mask_unset(self): for dtype in self.dtypes: data = ma.array([0, 1], dtype=dtype) cube = Cube(data) - self.assertEqual(cube.dtype, dtype) + assert cube.dtype == dtype def test_real_data_masked__mask_set(self): for dtype in self.dtypes: data = ma.array([0, 1], dtype=dtype) data[0] = ma.masked cube = Cube(data) - self.assertEqual(cube.dtype, dtype) + assert cube.dtype == dtype def test_lazy_data(self): for dtype in self.dtypes: data = np.array([0, 1], dtype=dtype) cube = Cube(as_lazy_data(data)) - self.assertEqual(cube.dtype, dtype) + assert cube.dtype == dtype # Check that accessing the dtype does not trigger loading # of the data. - self.assertTrue(cube.has_lazy_data()) + assert cube.has_lazy_data() def test_lazy_data_masked__mask_unset(self): for dtype in self.dtypes: data = ma.array([0, 1], dtype=dtype) cube = Cube(as_lazy_data(data)) - self.assertEqual(cube.dtype, dtype) + assert cube.dtype == dtype # Check that accessing the dtype does not trigger loading # of the data. 
- self.assertTrue(cube.has_lazy_data()) + assert cube.has_lazy_data() def test_lazy_data_masked__mask_set(self): for dtype in self.dtypes: data = ma.array([0, 1], dtype=dtype) data[0] = ma.masked cube = Cube(as_lazy_data(data)) - self.assertEqual(cube.dtype, dtype) + assert cube.dtype == dtype # Check that accessing the dtype does not trigger loading # of the data. - self.assertTrue(cube.has_lazy_data()) + assert cube.has_lazy_data() -class TestSubset(tests.IrisTest): +class TestSubset: def test_scalar_coordinate(self): cube = Cube(0, long_name="apricot", units="1") cube.add_aux_coord(DimCoord([0], long_name="banana", units="1")) result = cube.subset(cube.coord("banana")) - self.assertEqual(cube, result) + assert cube == result def test_dimensional_coordinate(self): cube = Cube(np.zeros((4)), long_name="tinned_peach", units="1") @@ -2439,52 +2575,52 @@ def test_dimensional_coordinate(self): 0, ) result = cube.subset(cube.coord("sixteen_ton_weight")) - self.assertEqual(cube, result) + assert cube == result def test_missing_coordinate(self): cube = Cube(0, long_name="raspberry", units="1") cube.add_aux_coord(DimCoord([0], long_name="loganberry", units="1")) bad_coord = DimCoord([0], long_name="tiger", units="1") - self.assertRaises(CoordinateNotFoundError, cube.subset, bad_coord) + pytest.raises(CoordinateNotFoundError, cube.subset, bad_coord) def test_different_coordinate(self): cube = Cube(0, long_name="raspberry", units="1") cube.add_aux_coord(DimCoord([0], long_name="loganberry", units="1")) different_coord = DimCoord([2], long_name="loganberry", units="1") result = cube.subset(different_coord) - self.assertEqual(result, None) + assert result is None def test_different_coordinate_vector(self): cube = Cube([0, 1], long_name="raspberry", units="1") cube.add_dim_coord(DimCoord([0, 1], long_name="loganberry", units="1"), 0) different_coord = DimCoord([2], long_name="loganberry", units="1") result = cube.subset(different_coord) - self.assertEqual(result, None) + assert result is None def test_not_coordinate(self): cube = Cube(0, long_name="peach", units="1") cube.add_aux_coord(DimCoord([0], long_name="crocodile", units="1")) - self.assertRaises(ValueError, cube.subset, "Pointed Stick") + pytest.raises(ValueError, cube.subset, "Pointed Stick") -class Test_add_metadata(tests.IrisTest): +class Test_add_metadata: def test_add_dim_coord(self): cube = Cube(np.arange(3)) x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") cube.add_dim_coord(x_coord, 0) - self.assertEqual(cube.coord("x"), x_coord) + assert cube.coord("x") == x_coord def test_add_aux_coord(self): cube = Cube(np.arange(6).reshape(2, 3)) x_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="x") cube.add_aux_coord(x_coord, [0, 1]) - self.assertEqual(cube.coord("x"), x_coord) + assert cube.coord("x") == x_coord def test_add_cell_measure(self): cube = Cube(np.arange(6).reshape(2, 3)) a_cell_measure = CellMeasure(np.arange(6).reshape(2, 3), long_name="area") cube.add_cell_measure(a_cell_measure, [0, 1]) - self.assertEqual(cube.cell_measure("area"), a_cell_measure) + assert cube.cell_measure("area") == a_cell_measure def test_add_ancillary_variable(self): cube = Cube(np.arange(6).reshape(2, 3)) @@ -2492,9 +2628,7 @@ def test_add_ancillary_variable(self): data=np.arange(6).reshape(2, 3), long_name="detection quality" ) cube.add_ancillary_variable(ancillary_variable, [0, 1]) - self.assertEqual( - cube.ancillary_variable("detection quality"), ancillary_variable - ) + assert cube.ancillary_variable("detection quality") == 
ancillary_variable def test_add_valid_aux_factory(self): cube = Cube(np.arange(8).reshape(2, 2, 2)) @@ -2505,7 +2639,7 @@ def test_add_valid_aux_factory(self): cube.add_aux_coord(sigma, 0) cube.add_aux_coord(orog, (1, 2)) factory = HybridHeightFactory(delta=delta, sigma=sigma, orography=orog) - self.assertIsNone(cube.add_aux_factory(factory)) + assert cube.add_aux_factory(factory) is None def test_error_for_add_invalid_aux_factory(self): cube = Cube(np.arange(8).reshape(2, 2, 2), long_name="bar") @@ -2517,12 +2651,13 @@ def test_error_for_add_invalid_aux_factory(self): # Note orography is not added to the cube here factory = HybridHeightFactory(delta=delta, sigma=sigma, orography=orog) expected_error = "foo coordinate for factory is not present on cube bar" - with self.assertRaisesRegex(ValueError, expected_error): + with pytest.raises(ValueError, match=expected_error): cube.add_aux_factory(factory) -class Test_remove_metadata(tests.IrisTest): - def setUp(self): +class Test_remove_metadata: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(np.arange(6).reshape(2, 3)) x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") cube.add_dim_coord(x_coord, 1) @@ -2542,45 +2677,42 @@ def setUp(self): def test_remove_dim_coord(self): self.cube.remove_coord(self.cube.coord("x")) - self.assertEqual(self.cube.coords("x"), []) + assert self.cube.coords("x") == [] def test_remove_aux_coord(self): self.cube.remove_coord(self.cube.coord("z")) - self.assertEqual(self.cube.coords("z"), []) + assert self.cube.coords("z") == [] def test_remove_cell_measure(self): self.cube.remove_cell_measure(self.cube.cell_measure("area")) - self.assertEqual( - self.cube._cell_measures_and_dims, [(self.b_cell_measure, (0, 1))] - ) + assert self.cube._cell_measures_and_dims == [(self.b_cell_measure, (0, 1))] def test_remove_cell_measure_by_name(self): self.cube.remove_cell_measure("area") - self.assertEqual( - self.cube._cell_measures_and_dims, [(self.b_cell_measure, (0, 1))] - ) + assert self.cube._cell_measures_and_dims == [(self.b_cell_measure, (0, 1))] def test_fail_remove_cell_measure_by_name(self): - with self.assertRaises(CellMeasureNotFoundError): + with pytest.raises(CellMeasureNotFoundError): self.cube.remove_cell_measure("notarea") def test_remove_ancilliary_variable(self): self.cube.remove_ancillary_variable( self.cube.ancillary_variable("Quality of Detection") ) - self.assertEqual(self.cube._ancillary_variables_and_dims, []) + assert self.cube._ancillary_variables_and_dims == [] def test_remove_ancilliary_variable_by_name(self): self.cube.remove_ancillary_variable("Quality of Detection") - self.assertEqual(self.cube._ancillary_variables_and_dims, []) + assert self.cube._ancillary_variables_and_dims == [] def test_fail_remove_ancilliary_variable_by_name(self): - with self.assertRaises(AncillaryVariableNotFoundError): + with pytest.raises(AncillaryVariableNotFoundError): self.cube.remove_ancillary_variable("notname") -class TestCoords(tests.IrisTest): - def setUp(self): +class TestCoords: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(np.arange(6).reshape(2, 3)) x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") cube.add_dim_coord(x_coord, 1) @@ -2594,19 +2726,20 @@ def test_bad_coord(self): "Expected to find exactly 1 coordinate matching the given " "'x' coordinate's metadata, but found none." 
) - with self.assertRaisesRegex(CoordinateNotFoundError, re): + with pytest.raises(CoordinateNotFoundError, match=re): _ = self.cube.coord(bad_coord) -class Test_coord_division_units(tests.IrisTest): +class Test_coord_division_units: def test(self): aux = AuxCoord(1, long_name="length", units="metres") cube = Cube(1, units="seconds") - self.assertEqual((aux / cube).units, "m.s-1") + assert (aux / cube).units == "m.s-1" -class Test__getitem_CellMeasure(tests.IrisTest): - def setUp(self): +class Test__getitem_CellMeasure: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(np.arange(6).reshape(2, 3)) x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") cube.add_dim_coord(x_coord, 1) @@ -2620,17 +2753,18 @@ def setUp(self): def test_cell_measure_2d(self): result = self.cube[0:2, 0:2] - self.assertEqual(len(result.cell_measures()), 1) - self.assertEqual(result.shape, result.cell_measures()[0].data.shape) + assert len(result.cell_measures()) == 1 + assert result.shape == result.cell_measures()[0].data.shape def test_cell_measure_1d(self): result = self.cube[0, 0:2] - self.assertEqual(len(result.cell_measures()), 1) - self.assertEqual(result.shape, result.cell_measures()[0].data.shape) + assert len(result.cell_measures()) == 1 + assert result.shape == result.cell_measures()[0].data.shape -class Test__getitem_AncillaryVariables(tests.IrisTest): - def setUp(self): +class Test__getitem_AncillaryVariables: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(np.arange(6).reshape(2, 3)) x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") cube.add_dim_coord(x_coord, 1) @@ -2646,17 +2780,18 @@ def setUp(self): def test_ancillary_variables_2d(self): result = self.cube[0:2, 0:2] - self.assertEqual(len(result.ancillary_variables()), 1) - self.assertEqual(result.shape, result.ancillary_variables()[0].data.shape) + assert len(result.ancillary_variables()) == 1 + assert result.shape == result.ancillary_variables()[0].data.shape def test_ancillary_variables_1d(self): result = self.cube[0, 0:2] - self.assertEqual(len(result.ancillary_variables()), 1) - self.assertEqual(result.shape, result.ancillary_variables()[0].data.shape) + assert len(result.ancillary_variables()) == 1 + assert result.shape == result.ancillary_variables()[0].data.shape -class TestAncillaryVariables(tests.IrisTest): - def setUp(self): +class TestAncillaryVariables: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(10 * np.arange(6).reshape(2, 3)) self.ancill_var = AncillaryVariable( np.arange(6).reshape(2, 3), @@ -2668,49 +2803,50 @@ def setUp(self): def test_get_ancillary_variable(self): ancill_var = self.cube.ancillary_variable("number_of_observations") - self.assertEqual(ancill_var, self.ancill_var) + assert ancill_var == self.ancill_var def test_get_ancillary_variables(self): ancill_vars = self.cube.ancillary_variables("number_of_observations") - self.assertEqual(len(ancill_vars), 1) - self.assertEqual(ancill_vars[0], self.ancill_var) + assert len(ancill_vars) == 1 + assert ancill_vars[0] == self.ancill_var def test_get_ancillary_variable_obj(self): ancill_vars = self.cube.ancillary_variables(self.ancill_var) - self.assertEqual(len(ancill_vars), 1) - self.assertEqual(ancill_vars[0], self.ancill_var) + assert len(ancill_vars) == 1 + assert ancill_vars[0] == self.ancill_var def test_fail_get_ancillary_variables(self): - with self.assertRaises(AncillaryVariableNotFoundError): + with pytest.raises(AncillaryVariableNotFoundError): self.cube.ancillary_variable("other_ancill_var") def 
test_fail_get_ancillary_variables_obj(self): ancillary_variable = self.ancill_var.copy() ancillary_variable.long_name = "Number of observations at site" - with self.assertRaises(AncillaryVariableNotFoundError): + with pytest.raises(AncillaryVariableNotFoundError): self.cube.ancillary_variable(ancillary_variable) def test_ancillary_variable_dims(self): ancill_var_dims = self.cube.ancillary_variable_dims(self.ancill_var) - self.assertEqual(ancill_var_dims, (0, 1)) + assert ancill_var_dims == (0, 1) def test_fail_ancill_variable_dims(self): ancillary_variable = self.ancill_var.copy() ancillary_variable.long_name = "Number of observations at site" - with self.assertRaises(AncillaryVariableNotFoundError): + with pytest.raises(AncillaryVariableNotFoundError): self.cube.ancillary_variable_dims(ancillary_variable) def test_ancillary_variable_dims_by_name(self): ancill_var_dims = self.cube.ancillary_variable_dims("number_of_observations") - self.assertEqual(ancill_var_dims, (0, 1)) + assert ancill_var_dims == (0, 1) def test_fail_ancillary_variable_dims_by_name(self): - with self.assertRaises(AncillaryVariableNotFoundError): + with pytest.raises(AncillaryVariableNotFoundError): self.cube.ancillary_variable_dims("notname") -class TestCellMeasures(tests.IrisTest): - def setUp(self): +class TestCellMeasures: + @pytest.fixture(autouse=True) + def _setup(self): cube = Cube(np.arange(6).reshape(2, 3)) x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") cube.add_dim_coord(x_coord, 1) @@ -2724,49 +2860,50 @@ def setUp(self): def test_get_cell_measure(self): cm = self.cube.cell_measure("area") - self.assertEqual(cm, self.a_cell_measure) + assert cm == self.a_cell_measure def test_get_cell_measures(self): cms = self.cube.cell_measures() - self.assertEqual(len(cms), 1) - self.assertEqual(cms[0], self.a_cell_measure) + assert len(cms) == 1 + assert cms[0] == self.a_cell_measure def test_get_cell_measures_obj(self): cms = self.cube.cell_measures(self.a_cell_measure) - self.assertEqual(len(cms), 1) - self.assertEqual(cms[0], self.a_cell_measure) + assert len(cms) == 1 + assert cms[0] == self.a_cell_measure def test_fail_get_cell_measure(self): - with self.assertRaises(CellMeasureNotFoundError): + with pytest.raises(CellMeasureNotFoundError): _ = self.cube.cell_measure("notarea") def test_fail_get_cell_measures_obj(self): a_cell_measure = self.a_cell_measure.copy() a_cell_measure.units = "km2" - with self.assertRaises(CellMeasureNotFoundError): + with pytest.raises(CellMeasureNotFoundError): _ = self.cube.cell_measure(a_cell_measure) def test_cell_measure_dims(self): cm_dims = self.cube.cell_measure_dims(self.a_cell_measure) - self.assertEqual(cm_dims, (0, 1)) + assert cm_dims == (0, 1) def test_fail_cell_measure_dims(self): a_cell_measure = self.a_cell_measure.copy() a_cell_measure.units = "km2" - with self.assertRaises(CellMeasureNotFoundError): + with pytest.raises(CellMeasureNotFoundError): _ = self.cube.cell_measure_dims(a_cell_measure) def test_cell_measure_dims_by_name(self): cm_dims = self.cube.cell_measure_dims("area") - self.assertEqual(cm_dims, (0, 1)) + assert cm_dims == (0, 1) def test_fail_cell_measure_dims_by_name(self): - with self.assertRaises(CellMeasureNotFoundError): + with pytest.raises(CellMeasureNotFoundError): self.cube.cell_measure_dims("notname") -class Test_transpose(tests.IrisTest): - def setUp(self): +class Test_transpose: + @pytest.fixture(autouse=True) + def _setup(self): self.data = np.arange(24).reshape(3, 2, 4) self.cube = Cube(self.data) self.lazy_cube = 
Cube(as_lazy_data(self.data)) @@ -2774,28 +2911,28 @@ def setUp(self): def test_lazy_data(self): cube = self.lazy_cube cube.transpose() - self.assertTrue(cube.has_lazy_data()) - self.assertArrayEqual(self.data.T, cube.data) + assert cube.has_lazy_data() + _shared_utils.assert_array_equal(self.data.T, cube.data) def test_real_data(self): self.cube.transpose() - self.assertFalse(self.cube.has_lazy_data()) - self.assertIs(self.data.base, self.cube.data.base) - self.assertArrayEqual(self.data.T, self.cube.data) + assert not self.cube.has_lazy_data() + assert self.data.base is self.cube.data.base + _shared_utils.assert_array_equal(self.data.T, self.cube.data) def test_real_data__new_order(self): new_order = [2, 0, 1] self.cube.transpose(new_order) - self.assertFalse(self.cube.has_lazy_data()) - self.assertIs(self.data.base, self.cube.data.base) - self.assertArrayEqual(self.data.transpose(new_order), self.cube.data) + assert not self.cube.has_lazy_data() + assert self.data.base is self.cube.data.base + _shared_utils.assert_array_equal(self.data.transpose(new_order), self.cube.data) def test_lazy_data__new_order(self): new_order = [2, 0, 1] cube = self.lazy_cube cube.transpose(new_order) - self.assertTrue(cube.has_lazy_data()) - self.assertArrayEqual(self.data.transpose(new_order), cube.data) + assert cube.has_lazy_data() + _shared_utils.assert_array_equal(self.data.transpose(new_order), cube.data) def test_lazy_data__transpose_order_ndarray(self): # Check that a transpose order supplied as an array does not trip up @@ -2803,31 +2940,31 @@ def test_lazy_data__transpose_order_ndarray(self): new_order = np.array([2, 0, 1]) cube = self.lazy_cube cube.transpose(new_order) - self.assertTrue(cube.has_lazy_data()) - self.assertArrayEqual(self.data.transpose(new_order), cube.data) + assert cube.has_lazy_data() + _shared_utils.assert_array_equal(self.data.transpose(new_order), cube.data) def test_bad_transpose_order(self): exp_emsg = "Incorrect number of dimensions" - with self.assertRaisesRegex(ValueError, exp_emsg): + with pytest.raises(ValueError, match=exp_emsg): self.cube.transpose([1]) def test_dim_coords(self): x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") self.cube.add_dim_coord(x_coord, 0) self.cube.transpose() - self.assertEqual(self.cube._dim_coords_and_dims, [(x_coord, 2)]) + assert self.cube._dim_coords_and_dims == [(x_coord, 2)] def test_aux_coords(self): x_coord = AuxCoord(points=np.array([[2, 3], [8, 4], [7, 9]]), long_name="x") self.cube.add_aux_coord(x_coord, (0, 1)) self.cube.transpose() - self.assertEqual(self.cube._aux_coords_and_dims, [(x_coord, (2, 1))]) + assert self.cube._aux_coords_and_dims == [(x_coord, (2, 1))] def test_cell_measures(self): area_cm = CellMeasure(np.arange(12).reshape(3, 4), long_name="area of cells") self.cube.add_cell_measure(area_cm, (0, 2)) self.cube.transpose() - self.assertEqual(self.cube._cell_measures_and_dims, [(area_cm, (2, 0))]) + assert self.cube._cell_measures_and_dims == [(area_cm, (2, 0))] def test_ancillary_variables(self): ancill_var = AncillaryVariable( @@ -2835,19 +2972,17 @@ def test_ancillary_variables(self): ) self.cube.add_ancillary_variable(ancill_var, (1, 2)) self.cube.transpose() - self.assertEqual( - self.cube._ancillary_variables_and_dims, [(ancill_var, (1, 0))] - ) + assert self.cube._ancillary_variables_and_dims == [(ancill_var, (1, 0))] -class Test_convert_units(tests.IrisTest): +class Test_convert_units: def test_convert_unknown_units(self): cube = iris.cube.Cube(1) emsg = ( "Cannot convert from unknown units. 
" 'The "cube.units" attribute may be set directly.' ) - with self.assertRaisesRegex(UnitConversionError, emsg): + with pytest.raises(UnitConversionError, match=emsg): cube.convert_units("mm day-1") def test_preserves_lazy(self): @@ -2856,8 +2991,8 @@ def test_preserves_lazy(self): cube = iris.cube.Cube(lazy_data, units="m") real_data_ft = Unit("m").convert(real_data, "ft") cube.convert_units("ft") - self.assertTrue(cube.has_lazy_data()) - self.assertArrayAllClose(cube.data, real_data_ft) + assert cube.has_lazy_data() + _shared_utils.assert_array_all_close(cube.data, real_data_ft) def test_unit_multiply(self): _client = Client() @@ -2868,22 +3003,22 @@ def test_unit_multiply(self): _client.close() -class Test__eq__data(tests.IrisTest): +class Test__eq__data: """Partial cube equality testing, for data type only.""" def test_cube_identical_to_itself(self): cube = Cube([1.0]) - self.assertTrue(cube == cube) + assert cube == cube def test_data_float_eq(self): cube1 = Cube([1.0]) cube2 = Cube([1.0]) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 def test_data_float_nan_eq(self): cube1 = Cube([np.nan, 1.0]) cube2 = Cube([np.nan, 1.0]) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 def test_data_float_eqtol(self): val1 = np.array(1.0, dtype=np.float32) @@ -2892,45 +3027,45 @@ def test_data_float_eqtol(self): val2 = np.array(1.0 + 1.0e-6, dtype=np.float32) cube1 = Cube([val1]) cube2 = Cube([val2]) - self.assertNotEqual(val1, val2) - self.assertTrue(cube1 == cube2) + assert val1 != val2 + assert cube1 == cube2 def test_data_float_not_eq(self): val1 = 1.0 val2 = 1.0 + 1.0e-4 cube1 = Cube([1.0, val1]) cube2 = Cube([1.0, val2]) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 def test_data_int_eq(self): cube1 = Cube([1, 2, 3]) cube2 = Cube([1, 2, 3]) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 def test_data_int_not_eq(self): cube1 = Cube([1, 2, 3]) cube2 = Cube([1, 2, 0]) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 # NOTE: since numpy v1.18, boolean array subtract is deprecated. 
def test_data_bool_eq(self): cube1 = Cube([True, False]) cube2 = Cube([True, False]) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 def test_data_bool_not_eq(self): cube1 = Cube([True, False]) cube2 = Cube([True, True]) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 -class Test__eq__meta(tests.IrisTest): +class Test__eq__meta: def test_ancillary_fail(self): cube1 = Cube([0, 1]) cube2 = Cube([0, 1]) avr = AncillaryVariable([2, 3], long_name="foo") cube2.add_ancillary_variable(avr, 0) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 def test_ancillary_reorder(self): cube1 = Cube([0, 1]) @@ -2943,7 +3078,7 @@ def test_ancillary_reorder(self): cube1.add_ancillary_variable(avr2, 0) cube2.add_ancillary_variable(avr2, 0) cube2.add_ancillary_variable(avr1, 0) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 def test_ancillary_diff_data(self): cube1 = Cube([0, 1]) @@ -2952,14 +3087,14 @@ def test_ancillary_diff_data(self): avr2 = AncillaryVariable([4, 5], long_name="foo") cube1.add_ancillary_variable(avr1, 0) cube2.add_ancillary_variable(avr2, 0) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 def test_cell_measure_fail(self): cube1 = Cube([0, 1]) cube2 = Cube([0, 1]) cms = CellMeasure([2, 3], long_name="foo") cube2.add_cell_measure(cms, 0) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 def test_cell_measure_reorder(self): cube1 = Cube([0, 1]) @@ -2972,7 +3107,7 @@ def test_cell_measure_reorder(self): cube1.add_cell_measure(cms2, 0) cube2.add_cell_measure(cms2, 0) cube2.add_cell_measure(cms1, 0) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 def test_cell_measure_diff_data(self): cube1 = Cube([0, 1]) @@ -2981,14 +3116,14 @@ def test_cell_measure_diff_data(self): cms2 = CellMeasure([4, 5], long_name="foo") cube1.add_cell_measure(cms1, 0) cube2.add_cell_measure(cms2, 0) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 def test_cell_method_fail(self): cube1 = Cube([0, 1]) cube2 = Cube([0, 1]) cmth = CellMethod("mean", "time", "6hr") cube2.add_cell_method(cmth) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 # Unlike cell measures, cell methods are order sensitive. 
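A note on the `assertFalse(cube1 == cube2)` conversions above: the literal translation is `assert not (cube1 == cube2)`, and rewriting it as `assert cube1 != cube2` is equivalent only when `__ne__` mirrors `__eq__` (which Python 3 supplies by default for classes that do not override `__ne__`). A minimal standalone illustration of that assumption, not using Iris at all:

class Eq:
    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return self.value == other.value
    # No __ne__ defined: Python 3 derives it as the negation of __eq__,
    # so `a != b` and `not (a == b)` agree for this class.

assert not (Eq(1) == Eq(2))
assert Eq(1) != Eq(2)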
def test_cell_method_reorder_fail(self): @@ -3002,7 +3137,7 @@ def test_cell_method_reorder_fail(self): cube1.add_cell_method(cmth2) cube2.add_cell_method(cmth2) cube2.add_cell_method(cmth1) - self.assertFalse(cube1 == cube2) + assert cube1 != cube2 def test_cell_method_correct_order(self): cube1 = Cube([0, 1]) @@ -3015,10 +3150,10 @@ def test_cell_method_correct_order(self): cube1.add_cell_method(cmth2) cube2.add_cell_method(cmth1) cube2.add_cell_method(cmth2) - self.assertTrue(cube1 == cube2) + assert cube1 == cube2 -@pytest.fixture +@pytest.fixture() def simplecube(): return stock.simple_2d_w_cell_measure_ancil_var() @@ -3096,14 +3231,14 @@ class TestReprs: """ # Note: logically this could be a staticmethod, but that seems to upset Pytest - @pytest.fixture - def patched_cubeprinter(self): + @pytest.fixture() + def patched_cubeprinter(self, mocker): target = "iris._representation.cube_printout.CubePrinter" instance_mock = mock.MagicMock( to_string=mock.MagicMock(return_value="") # NB this must return a string ) - with mock.patch(target, return_value=instance_mock) as class_mock: - yield class_mock, instance_mock + class_mock = mocker.patch(target, return_value=instance_mock) + yield class_mock, instance_mock @staticmethod def _check_expected_effects(simplecube, patched_cubeprinter, oneline, padding): @@ -3153,14 +3288,14 @@ class TestHtmlRepr: """ # Note: logically this could be a staticmethod, but that seems to upset Pytest - @pytest.fixture - def patched_cubehtml(self): + @pytest.fixture() + def patched_cubehtml(self, mocker): target = "iris.experimental.representation.CubeRepresentation" instance_mock = mock.MagicMock( repr_html=mock.MagicMock(return_value="") # NB this must return a string ) - with mock.patch(target, return_value=instance_mock) as class_mock: - yield class_mock, instance_mock + class_mock = mocker.patch(target, return_value=instance_mock) + yield class_mock, instance_mock @staticmethod def test__repr_html__effects(simplecube, patched_cubehtml): @@ -3179,7 +3314,7 @@ def test__repr_html__effects(simplecube, patched_cubehtml): class Test__cell_methods: @pytest.fixture(autouse=True) - def cell_measures_testdata(self): + def _setup(self): self.cube = Cube([0]) self.cm = CellMethod("mean", "time", "6hr") self.cm2 = CellMethod("max", "latitude", "4hr") diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py index 50de4541e0..4ba65913c6 100644 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -14,7 +14,7 @@ from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS -@pytest.fixture +@pytest.fixture() def sample_attrs() -> CubeAttrsDict: return CubeAttrsDict(locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"}) @@ -367,10 +367,10 @@ def test_local_global_masking(self, sample_attrs): sample_attrs.globals["z"] == "other" assert sample_attrs["z"] == "new" - @pytest.mark.parametrize("globals_or_locals", ("globals", "locals")) + @pytest.mark.parametrize("globals_or_locals", ["globals", "locals"]) @pytest.mark.parametrize( "value_type", - ("replace", "emptylist", "emptytuple", "none", "zero", "false"), + ["replace", "emptylist", "emptytuple", "none", "zero", "false"], ) def test_replace_subdict(self, globals_or_locals, value_type): # Writing to attrs.xx always replaces content with a *new* LimitedAttributeDict diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 72ca7d2306..62e63e6694 100644 --- 
a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -4,16 +4,13 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.CubeList` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import collections import copy from unittest import mock from cf_units import Unit import numpy as np +import pytest from iris import Constraint import iris.coord_systems @@ -21,31 +18,34 @@ from iris.cube import Cube, CubeList import iris.exceptions from iris.fileformats.pp import STASH +from iris.tests import _shared_utils import iris.tests.stock NOT_CUBE_MSG = "cannot be put in a cubelist, as it is not a Cube." NON_ITERABLE_MSG = "object is not iterable" -class Test_append(tests.IrisTest): - def setUp(self): +class Test_append: + @pytest.fixture(autouse=True) + def _setup(self): self.cubelist = iris.cube.CubeList() self.cube1 = iris.cube.Cube(1, long_name="foo") self.cube2 = iris.cube.Cube(1, long_name="bar") def test_pass(self): self.cubelist.append(self.cube1) - self.assertEqual(self.cubelist[-1], self.cube1) + assert self.cubelist[-1] == self.cube1 self.cubelist.append(self.cube2) - self.assertEqual(self.cubelist[-1], self.cube2) + assert self.cubelist[-1] == self.cube2 def test_fail(self): - with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + with pytest.raises(ValueError, match=NOT_CUBE_MSG): self.cubelist.append(None) -class Test_concatenate_cube(tests.IrisTest): - def setUp(self): +class Test_concatenate_cube: + @pytest.fixture(autouse=True) + def _setup(self): self.units = Unit("days since 1970-01-01 00:00:00", calendar="standard") self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") self.cube1.add_dim_coord(DimCoord([0, 1, 2], "time", units=self.units), 0) @@ -54,13 +54,13 @@ def test_pass(self): self.cube2 = Cube([1, 2, 3], "air_temperature", units="K") self.cube2.add_dim_coord(DimCoord([3, 4, 5], "time", units=self.units), 0) result = CubeList([self.cube1, self.cube2]).concatenate_cube() - self.assertIsInstance(result, Cube) + assert isinstance(result, Cube) def test_fail(self): units = Unit("days since 1970-01-02 00:00:00", calendar="standard") cube2 = Cube([1, 2, 3], "air_temperature", units="K") cube2.add_dim_coord(DimCoord([0, 1, 2], "time", units=units), 0) - with self.assertRaises(iris.exceptions.ConcatenateError): + with pytest.raises(iris.exceptions.ConcatenateError): CubeList([self.cube1, cube2]).concatenate_cube() def test_names_differ_fail(self): @@ -69,17 +69,18 @@ def test_names_differ_fail(self): self.cube3 = Cube([1, 2, 3], "air_pressure", units="Pa") self.cube3.add_dim_coord(DimCoord([3, 4, 5], "time", units=self.units), 0) exc_regexp = "Cube names differ: air_temperature != air_pressure" - with self.assertRaisesRegex(iris.exceptions.ConcatenateError, exc_regexp): + with pytest.raises(iris.exceptions.ConcatenateError, match=exc_regexp): CubeList([self.cube1, self.cube2, self.cube3]).concatenate_cube() def test_empty(self): exc_regexp = "can't concatenate an empty CubeList" - with self.assertRaisesRegex(ValueError, exc_regexp): + with pytest.raises(ValueError, match=exc_regexp): CubeList([]).concatenate_cube() -class Test_extend(tests.IrisTest): - def setUp(self): +class Test_extend: + @pytest.fixture(autouse=True) + def _setup(self): self.cube1 = iris.cube.Cube(1, long_name="foo") self.cube2 = iris.cube.Cube(1, long_name="bar") self.cubelist1 = iris.cube.CubeList([self.cube1]) 
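The `assertRaisesRegex` conversions in these hunks all rely on `pytest.raises(..., match=...)`. For reference, a small standalone sketch of that idiom (the message and exception here are invented, nothing is taken from the Iris suite); note that `match` is applied as a regular expression via `re.search`, so literal messages containing metacharacters are safest wrapped in `re.escape`:

import re

import pytest


def test_error_message_is_matched():
    emsg = "cannot merge cube (dimension mismatch)"
    with pytest.raises(ValueError, match=re.escape(emsg)):
        raise ValueError(emsg)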
@@ -88,21 +89,22 @@ def setUp(self): def test_pass(self): cubelist = copy.copy(self.cubelist1) cubelist.extend(self.cubelist2) - self.assertEqual(cubelist, self.cubelist1 + self.cubelist2) + assert cubelist == self.cubelist1 + self.cubelist2 cubelist.extend([self.cube2]) - self.assertEqual(cubelist[-1], self.cube2) + assert cubelist[-1] == self.cube2 def test_fail(self): - with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + with pytest.raises(TypeError, match=NON_ITERABLE_MSG): self.cubelist1.extend(self.cube1) - with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + with pytest.raises(TypeError, match=NON_ITERABLE_MSG): self.cubelist1.extend(None) - with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + with pytest.raises(ValueError, match=NOT_CUBE_MSG): self.cubelist1.extend(range(3)) -class Test_extract_overlapping(tests.IrisTest): - def setUp(self): +class Test_extract_overlapping: + @pytest.fixture(autouse=True) + def _setup(self): shape = (6, 14, 19) n_time, n_lat, n_lon = shape n_data = n_time * n_lat * n_lon @@ -133,32 +135,33 @@ def setUp(self): def test_extract_one_str_dim(self): cubes = iris.cube.CubeList([self.cube[2:], self.cube[:4]]) a, b = cubes.extract_overlapping("time") - self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) + assert a.coord("time") == self.cube.coord("time")[2:4] + assert b.coord("time") == self.cube.coord("time")[2:4] def test_extract_one_list_dim(self): cubes = iris.cube.CubeList([self.cube[2:], self.cube[:4]]) a, b = cubes.extract_overlapping(["time"]) - self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) + assert a.coord("time") == self.cube.coord("time")[2:4] + assert b.coord("time") == self.cube.coord("time")[2:4] def test_extract_two_dims(self): cubes = iris.cube.CubeList([self.cube[2:, 5:], self.cube[:4, :10]]) a, b = cubes.extract_overlapping(["time", "latitude"]) - self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual(a.coord("latitude"), self.cube.coord("latitude")[5:10]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual(b.coord("latitude"), self.cube.coord("latitude")[5:10]) + assert a.coord("time") == self.cube.coord("time")[2:4] + assert a.coord("latitude") == self.cube.coord("latitude")[5:10] + assert b.coord("time") == self.cube.coord("time")[2:4] + assert b.coord("latitude") == self.cube.coord("latitude")[5:10] def test_different_orders(self): cubes = iris.cube.CubeList([self.cube[::-1][:4], self.cube[:4]]) a, b = cubes.extract_overlapping("time") - self.assertEqual(a.coord("time"), self.cube[::-1].coord("time")[2:4]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) + assert a.coord("time") == self.cube[::-1].coord("time")[2:4] + assert b.coord("time") == self.cube.coord("time")[2:4] -class Test_iadd(tests.IrisTest): - def setUp(self): +class Test_iadd: + @pytest.fixture(autouse=True) + def _setup(self): self.cube1 = iris.cube.Cube(1, long_name="foo") self.cube2 = iris.cube.Cube(1, long_name="bar") self.cubelist1 = iris.cube.CubeList([self.cube1]) @@ -167,36 +170,38 @@ def setUp(self): def test_pass(self): cubelist = copy.copy(self.cubelist1) cubelist += self.cubelist2 - self.assertEqual(cubelist, self.cubelist1 + self.cubelist2) + assert cubelist == self.cubelist1 + self.cubelist2 cubelist += [self.cube2] - self.assertEqual(cubelist[-1], self.cube2) + assert cubelist[-1] == self.cube2 def 
test_fail(self): - with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + with pytest.raises(TypeError, match=NON_ITERABLE_MSG): self.cubelist1 += self.cube1 - with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + with pytest.raises(TypeError, match=NON_ITERABLE_MSG): self.cubelist1 += 1.0 - with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + with pytest.raises(ValueError, match=NOT_CUBE_MSG): self.cubelist1 += range(3) -class Test_insert(tests.IrisTest): - def setUp(self): +class Test_insert: + @pytest.fixture(autouse=True) + def _setup(self): self.cube1 = iris.cube.Cube(1, long_name="foo") self.cube2 = iris.cube.Cube(1, long_name="bar") self.cubelist = iris.cube.CubeList([self.cube1] * 3) def test_pass(self): self.cubelist.insert(1, self.cube2) - self.assertEqual(self.cubelist[1], self.cube2) + assert self.cubelist[1] == self.cube2 def test_fail(self): - with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + with pytest.raises(ValueError, match=NOT_CUBE_MSG): self.cubelist.insert(0, None) -class Test_merge_cube(tests.IrisTest): - def setUp(self): +class Test_merge_cube: + @pytest.fixture(autouse=True) + def _setup(self): self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") self.cube1.add_aux_coord(AuxCoord([0], "height", units="m")) @@ -204,29 +209,33 @@ def test_pass(self): cube2 = self.cube1.copy() cube2.coord("height").points = [1] result = CubeList([self.cube1, cube2]).merge_cube() - self.assertIsInstance(result, Cube) + assert isinstance(result, Cube) def test_fail(self): cube2 = self.cube1.copy() cube2.rename("not air temperature") - with self.assertRaises(iris.exceptions.MergeError): + with pytest.raises(iris.exceptions.MergeError): CubeList([self.cube1, cube2]).merge_cube() def test_empty(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): CubeList([]).merge_cube() def test_single_cube(self): result = CubeList([self.cube1]).merge_cube() - self.assertEqual(result, self.cube1) - self.assertIsNot(result, self.cube1) + assert result == self.cube1 + assert result is not self.cube1 def test_repeated_cube(self): - with self.assertRaises(iris.exceptions.MergeError): + with pytest.raises(iris.exceptions.MergeError): CubeList([self.cube1, self.cube1]).merge_cube() -class Test_merge__time_triple(tests.IrisTest): +class Test_merge__time_triple: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request + @staticmethod def _make_cube(fp, rt, t, realization=None): cube = Cube(np.arange(20).reshape(4, 5)) @@ -259,7 +268,7 @@ def test_orthogonal_with_realization(self): en2_cubes = [self._make_cube(*triple, realization=2) for triple in triples] cubes = CubeList(en1_cubes) + CubeList(en2_cubes) (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) + _shared_utils.assert_CML(self.request, cube, checksum=False) def test_combination_with_realization(self): # => fp, rt, t: 8; realization: 2 @@ -277,7 +286,7 @@ def test_combination_with_realization(self): en2_cubes = [self._make_cube(*triple, realization=2) for triple in triples] cubes = CubeList(en1_cubes) + CubeList(en2_cubes) (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) + _shared_utils.assert_CML(self.request, cube, checksum=False) def test_combination_with_extra_realization(self): # => fp, rt, t, realization: 17 @@ -298,7 +307,7 @@ def test_combination_with_extra_realization(self): en3_cubes = [self._make_cube(0, 10, 2, realization=3)] cubes = CubeList(en1_cubes) + CubeList(en2_cubes) + CubeList(en3_cubes) (cube,) = cubes.merge() - 
self.assertCML(cube, checksum=False) + _shared_utils.assert_CML(self.request, cube, checksum=False) def test_combination_with_extra_triple(self): # => fp, rt, t, realization: 17 @@ -320,11 +329,12 @@ def test_combination_with_extra_triple(self): ] cubes = CubeList(en1_cubes) + CubeList(en2_cubes) (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) + _shared_utils.assert_CML(self.request, cube, checksum=False) -class Test_setitem(tests.IrisTest): - def setUp(self): +class Test_setitem: + @pytest.fixture(autouse=True) + def _setup(self): self.cube1 = iris.cube.Cube(1, long_name="foo") self.cube2 = iris.cube.Cube(1, long_name="bar") self.cube3 = iris.cube.Cube(1, long_name="boo") @@ -332,41 +342,40 @@ def setUp(self): def test_pass(self): self.cubelist[1] = self.cube2 - self.assertEqual(self.cubelist[1], self.cube2) + assert self.cubelist[1] == self.cube2 self.cubelist[:2] = (self.cube2, self.cube3) - self.assertEqual( - self.cubelist, - iris.cube.CubeList([self.cube2, self.cube3, self.cube1]), - ) + assert self.cubelist == iris.cube.CubeList([self.cube2, self.cube3, self.cube1]) def test_fail(self): - with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + with pytest.raises(ValueError, match=NOT_CUBE_MSG): self.cubelist[0] = None - with self.assertRaisesRegex(ValueError, NOT_CUBE_MSG): + with pytest.raises(ValueError, match=NOT_CUBE_MSG): self.cubelist[0:2] = [self.cube3, None] - with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + with pytest.raises(TypeError, match=NON_ITERABLE_MSG): self.cubelist[:1] = 2.5 - with self.assertRaisesRegex(TypeError, NON_ITERABLE_MSG): + with pytest.raises(TypeError, match=NON_ITERABLE_MSG): self.cubelist[:1] = self.cube1 -class Test_xml(tests.IrisTest): - def setUp(self): +class Test_xml: + @pytest.fixture(autouse=True) + def _setup(self): self.cubes = CubeList([Cube(np.arange(3)), Cube(np.arange(3))]) def test_byteorder_default(self): - self.assertIn("byteorder", self.cubes.xml()) + assert "byteorder" in self.cubes.xml() def test_byteorder_false(self): - self.assertNotIn("byteorder", self.cubes.xml(byteorder=False)) + assert "byteorder" not in self.cubes.xml(byteorder=False) def test_byteorder_true(self): - self.assertIn("byteorder", self.cubes.xml(byteorder=True)) + assert "byteorder" in self.cubes.xml(byteorder=True) -class Test_extract(tests.IrisTest): - def setUp(self): +class Test_extract: + @pytest.fixture(autouse=True) + def _setup(self): self.scalar_cubes = CubeList() for i in range(5): for letter in "abcd": @@ -376,7 +385,7 @@ def test_scalar_cube_name_constraint(self): # Test the name based extraction of a CubeList containing scalar cubes. 
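The `request`-capturing fixture introduced for Test_merge__time_triple above is a general pytest pattern: helpers such as `_shared_utils.assert_CML` need to know which test is running, so the built-in `request` fixture is stored on the instance during setup. A rough standalone sketch of just that mechanism (the class and test names are invented for illustration):

import pytest


class TestUsesRequest:
    @pytest.fixture(autouse=True)
    def _setup(self, request):
        # Keep the pytest `request` object so later helper calls can
        # identify the currently running test.
        self.request = request

    def test_identity(self):
        assert self.request.node.name == "test_identity"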
res = self.scalar_cubes.extract("a") expected = CubeList([Cube(i, long_name="a") for i in range(5)]) - self.assertEqual(res, expected) + assert res == expected def test_scalar_cube_data_constraint(self): # Test the extraction of a CubeList containing scalar cubes @@ -385,7 +394,7 @@ def test_scalar_cube_data_constraint(self): constraint = iris.Constraint(cube_func=lambda c: c.data == val) res = self.scalar_cubes.extract(constraint) expected = CubeList([Cube(val, long_name=letter) for letter in "abcd"]) - self.assertEqual(res, expected) + assert res == expected class ExtractMixin: @@ -393,7 +402,8 @@ class ExtractMixin: # Effectively "abstract" -- inheritor must define this property : # method_name = 'extract_cube' / 'extract_cubes' - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube_x = Cube(0, long_name="x") self.cube_y = Cube(0, long_name="y") self.cons_x = Constraint("x") @@ -412,20 +422,18 @@ def check_extract(self, cubes, constraints, expected): cubelist = CubeList(cubes) method = getattr(cubelist, self.method_name) if isinstance(expected, str): - with self.assertRaisesRegex( - iris.exceptions.ConstraintMismatchError, expected - ): + with pytest.raises(iris.exceptions.ConstraintMismatchError, match=expected): method(constraints) else: result = method(constraints) if expected is None: - self.assertIsNone(result) + assert result is None elif isinstance(expected, Cube): - self.assertIsInstance(result, Cube) - self.assertEqual(result, expected) + assert isinstance(result, Cube) + assert result == expected elif isinstance(expected, list): - self.assertIsInstance(result, CubeList) - self.assertEqual(result, expected) + assert isinstance(result, CubeList) + assert result == expected else: msg = ( 'Unhandled usage in "check_extract" call: ' @@ -434,7 +442,7 @@ def check_extract(self, cubes, constraints, expected): raise ValueError(msg.format(type(expected), expected)) -class Test_extract_cube(ExtractMixin, tests.IrisTest): +class Test_extract_cube(ExtractMixin): method_name = "extract_cube" def test_empty(self): @@ -466,7 +474,7 @@ def test_none_as_constraint(self): def test_constraint_in_list__fail(self): # Check that we *cannot* use [constraint] msg = "cannot be cast to a constraint" - with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): self.check_extract([], [self.cons_x], []) def test_multi_cube_ok(self): @@ -493,7 +501,7 @@ class ExtractCubesMixin(ExtractMixin): method_name = "extract_cubes" -class Test_extract_cubes__noconstraint(ExtractCubesMixin, tests.IrisTest): +class Test_extract_cubes__noconstraint(ExtractCubesMixin): """Test with an empty list of constraints.""" def test_empty(self): @@ -553,23 +561,19 @@ def test_multi_cube__fail_too_many(self): ) -class Test_extract_cubes__bare_single_constraint( - ExtractCubesSingleConstraintMixin, tests.IrisTest -): +class Test_extract_cubes__bare_single_constraint(ExtractCubesSingleConstraintMixin): """Testing with a single constraint as the argument.""" wrap_test_constraint_as_list_of_one = False -class Test_extract_cubes__list_single_constraint( - ExtractCubesSingleConstraintMixin, tests.IrisTest -): +class Test_extract_cubes__list_single_constraint(ExtractCubesSingleConstraintMixin): """Testing with a list of one constraint as the argument.""" wrap_test_constraint_as_list_of_one = True -class Test_extract_cubes__multi_constraints(ExtractCubesMixin, tests.IrisTest): +class Test_extract_cubes__multi_constraints(ExtractCubesMixin): """Testing when the 'constraints' arg is a 
list of multiple constraints.""" def test_empty(self): @@ -627,61 +631,64 @@ def test_multi_cube__fail_too_many(self): ) -class Test_iteration(tests.IrisTest): - def setUp(self): +class Test_iteration: + @pytest.fixture(autouse=True) + def _setup(self): self.scalar_cubes = CubeList() for i in range(5): for letter in "abcd": self.scalar_cubes.append(Cube(i, long_name=letter)) def test_iterable(self): - self.assertIsInstance(self.scalar_cubes, collections.abc.Iterable) + assert isinstance(self.scalar_cubes, collections.abc.Iterable) def test_iteration(self): letters = "abcd" * 5 for i, cube in enumerate(self.scalar_cubes): - self.assertEqual(cube.long_name, letters[i]) + assert cube.long_name == letters[i] -class TestPrint(tests.IrisTest): - def setUp(self): +class TestPrint: + @pytest.fixture(autouse=True) + def _setup(self): self.cubes = CubeList([iris.tests.stock.lat_lon_cube()]) def test_summary(self): expected = "0: unknown / (unknown) (latitude: 3; longitude: 4)" - self.assertEqual(str(self.cubes), expected) + assert str(self.cubes) == expected def test_summary_name_unit(self): self.cubes[0].long_name = "aname" self.cubes[0].units = "1" expected = "0: aname / (1) (latitude: 3; longitude: 4)" - self.assertEqual(str(self.cubes), expected) + assert str(self.cubes) == expected def test_summary_stash(self): self.cubes[0].attributes["STASH"] = STASH.from_msi("m01s00i004") expected = "0: m01s00i004 / (unknown) (latitude: 3; longitude: 4)" - self.assertEqual(str(self.cubes), expected) + assert str(self.cubes) == expected -class TestRealiseData(tests.IrisTest): - def test_realise_data(self): +class TestRealiseData: + def test_realise_data(self, mocker): # Simply check that calling CubeList.realise_data is calling # _lazy_data.co_realise_cubes. mock_cubes_list = [mock.Mock(ident=count) for count in range(3)] test_cubelist = CubeList(mock_cubes_list) - call_patch = self.patch("iris._lazy_data.co_realise_cubes") + call_patch = mocker.patch("iris._lazy_data.co_realise_cubes") test_cubelist.realise_data() # Check it was called once, passing cubes as *args. - self.assertEqual(call_patch.call_args_list, [mock.call(*mock_cubes_list)]) + assert call_patch.call_args_list == [mock.call(*mock_cubes_list)] -class Test_CubeList_copy(tests.IrisTest): - def setUp(self): +class Test_CubeList_copy: + @pytest.fixture(autouse=True) + def _setup(self): self.cube_list = iris.cube.CubeList() self.copied_cube_list = self.cube_list.copy() def test_copy(self): - self.assertIsInstance(self.copied_cube_list, iris.cube.CubeList) + assert isinstance(self.copied_cube_list, iris.cube.CubeList) class TestHtmlRepr: @@ -696,13 +703,13 @@ class TestHtmlRepr: """ @staticmethod - def test__repr_html_(): + def test__repr_html_(mocker): test_cubelist = CubeList([]) target = "iris.experimental.representation.CubeListRepresentation" - with mock.patch(target) as class_mock: - # Exercise the function-under-test. - test_cubelist._repr_html_() + class_mock = mocker.patch(target) + # Exercise the function-under-test. 
+ test_cubelist._repr_html_() assert class_mock.call_args_list == [ # "CubeListRepresentation()" was called exactly once, with the cubelist as arg @@ -712,7 +719,3 @@ def test__repr_html_(): # "CubeListRepresentation(cubelist).repr_html()" was called exactly once, with no args mock.call() ] - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 878183139a..ae7a1cee7a 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -4,14 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.Cube` class aggregated_by method.""" -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - from unittest import mock from cf_units import Unit import numpy as np +import pytest from iris._lazy_data import as_lazy_data import iris.analysis @@ -20,11 +17,13 @@ import iris.coords from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube +from iris.tests import _shared_utils from iris.tests.stock import realistic_4d -class Test_aggregated_by(tests.IrisTest): - def setUp(self): +class Test_aggregated_by: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube(np.arange(44).reshape(4, 11)) val_coord = AuxCoord([0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val") @@ -99,13 +98,8 @@ def test_2d_coord_simple_agg(self): res_cube.slices("simple_agg"), self.cube.slices("simple_agg") ): cube_slice_agg = cube_slice.aggregated_by("simple_agg", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - self.assertEqual( - res_slice.coord("span_label"), - cube_slice_agg.coord("span_label"), - ) + assert res_slice.coord("spanning") == cube_slice_agg.coord("spanning") + assert res_slice.coord("span_label") == cube_slice_agg.coord("span_label") def test_agg_by_label(self): # Aggregate a cube on a string coordinate label where label @@ -122,8 +116,8 @@ def test_agg_by_label(self): long_name="label", units="no_unit", ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) + assert res_cube.coord("val") == val_coord + assert res_cube.coord("label") == label_coord def test_agg_by_label_bounded(self): # Aggregate a cube on a string coordinate label where label @@ -142,8 +136,8 @@ def test_agg_by_label_bounded(self): long_name="label", units="no_unit", ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) + assert res_cube.coord("val") == val_coord + assert res_cube.coord("label") == label_coord def test_2d_agg_by_label(self): res_cube = self.cube.aggregated_by("label", self.mock_agg) @@ -153,9 +147,7 @@ def test_2d_agg_by_label(self): res_cube.slices("val"), self.cube.slices("val") ): cube_slice_agg = cube_slice.aggregated_by("label", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) + assert res_slice.coord("spanning") == cube_slice_agg.coord("spanning") def test_agg_by_val(self): # Aggregate a cube on a numeric coordinate val where label @@ -169,8 +161,8 @@ def test_agg_by_val(self): label_coord = AuxCoord( np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" ) - self.assertEqual(res_cube.coord("val"), val_coord) - 
self.assertEqual(res_cube.coord("label"), label_coord) + assert res_cube.coord("val") == val_coord + assert res_cube.coord("label") == label_coord def test_2d_agg_by_val(self): res_cube = self.cube.aggregated_by("val", self.mock_agg) @@ -180,9 +172,7 @@ def test_2d_agg_by_val(self): res_cube.slices("val"), self.cube.slices("val") ): cube_slice_agg = cube_slice.aggregated_by("val", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) + assert res_slice.coord("spanning") == cube_slice_agg.coord("spanning") def test_single_string_aggregation(self): aux_coords = [ @@ -193,31 +183,31 @@ def test_single_string_aggregation(self): np.arange(12).reshape(3, 4), aux_coords_and_dims=aux_coords ) result = cube.aggregated_by("foo", MEAN) - self.assertEqual(result.shape, (2, 4)) - self.assertEqual(result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar")) + assert result.shape == (2, 4) + assert result.coord("bar") == AuxCoord(["a|a", "a"], long_name="bar") def test_ancillary_variables_and_cell_measures_kept(self): cube_agg = self.cube.aggregated_by("val", self.mock_agg) - self.assertEqual(cube_agg.ancillary_variables(), [self.ancillary_variable]) - self.assertEqual(cube_agg.cell_measures(), [self.cell_measure]) + assert cube_agg.ancillary_variables() == [self.ancillary_variable] + assert cube_agg.cell_measures() == [self.cell_measure] def test_ancillary_variables_and_cell_measures_removed(self): cube_agg = self.cube.aggregated_by("simple_agg", self.mock_agg) - self.assertEqual(cube_agg.ancillary_variables(), []) - self.assertEqual(cube_agg.cell_measures(), []) + assert cube_agg.ancillary_variables() == [] + assert cube_agg.cell_measures() == [] def test_1d_weights(self): self.cube.aggregated_by( "simple_agg", self.mock_weighted_agg, weights=self.simple_weights ) - self.assertEqual(self.mock_weighted_agg.aggregate.call_count, 2) + assert self.mock_weighted_agg.aggregate.call_count == 2 # A simple mock.assert_called_with does not work due to ValueError: The # truth value of an array with more than one element is ambiguous. Use # a.any() or a.all() call_1 = self.mock_weighted_agg.aggregate.mock_calls[0] - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( call_1.args[0], np.array( [ @@ -226,8 +216,8 @@ def test_1d_weights(self): ] ), ) - self.assertEqual(call_1.kwargs["axis"], 0) - np.testing.assert_array_almost_equal( + assert call_1.kwargs["axis"] == 0 + _shared_utils.assert_array_almost_equal( call_1.kwargs["weights"], np.array( [ @@ -238,7 +228,7 @@ def test_1d_weights(self): ) call_2 = self.mock_weighted_agg.aggregate.mock_calls[1] - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( call_2.args[0], np.array( [ @@ -247,8 +237,8 @@ def test_1d_weights(self): ] ), ) - self.assertEqual(call_2.kwargs["axis"], 0) - np.testing.assert_array_almost_equal( + assert call_2.kwargs["axis"] == 0 + _shared_utils.assert_array_almost_equal( call_2.kwargs["weights"], np.array( [ @@ -261,13 +251,13 @@ def test_1d_weights(self): def test_2d_weights(self): self.cube.aggregated_by("val", self.mock_weighted_agg, weights=self.val_weights) - self.assertEqual(self.mock_weighted_agg.aggregate.call_count, 3) + assert self.mock_weighted_agg.aggregate.call_count == 3 # A simple mock.assert_called_with does not work due to ValueError: The # truth value of an array with more than one element is ambiguous. 
Use # a.any() or a.all() call_1 = self.mock_weighted_agg.aggregate.mock_calls[0] - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( call_1.args[0], np.array( [ @@ -278,33 +268,39 @@ def test_2d_weights(self): ] ), ) - self.assertEqual(call_1.kwargs["axis"], 1) - np.testing.assert_array_almost_equal(call_1.kwargs["weights"], np.ones((4, 6))) + assert call_1.kwargs["axis"] == 1 + _shared_utils.assert_array_almost_equal( + call_1.kwargs["weights"], np.ones((4, 6)) + ) call_2 = self.mock_weighted_agg.aggregate.mock_calls[1] - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( call_2.args[0], np.array([[3, 4, 10], [14, 15, 21], [25, 26, 32], [36, 37, 43]]), ) - self.assertEqual(call_2.kwargs["axis"], 1) - np.testing.assert_array_almost_equal(call_2.kwargs["weights"], np.ones((4, 3))) + assert call_2.kwargs["axis"] == 1 + _shared_utils.assert_array_almost_equal( + call_2.kwargs["weights"], np.ones((4, 3)) + ) call_3 = self.mock_weighted_agg.aggregate.mock_calls[2] - np.testing.assert_array_equal( + _shared_utils.assert_array_equal( call_3.args[0], np.array([[5, 8], [16, 19], [27, 30], [38, 41]]) ) - self.assertEqual(call_3.kwargs["axis"], 1) - np.testing.assert_array_almost_equal(call_3.kwargs["weights"], np.ones((4, 2))) + assert call_3.kwargs["axis"] == 1 + _shared_utils.assert_array_almost_equal( + call_3.kwargs["weights"], np.ones((4, 2)) + ) def test_returned(self): output = self.cube.aggregated_by( "simple_agg", self.mock_weighted_agg, returned=True ) - self.assertTrue(isinstance(output, tuple)) - self.assertEqual(len(output), 2) - self.assertEqual(output[0].shape, (2, 11)) - self.assertEqual(output[1].shape, (2, 11)) + assert isinstance(output, tuple) + assert len(output) == 2 + assert output[0].shape == (2, 11) + assert output[1].shape == (2, 11) def test_fail_1d_weights_wrong_len(self): wrong_weights = np.array([1.0, 2.0]) @@ -312,7 +308,7 @@ def test_fail_1d_weights_wrong_len(self): r"1D weights must have the same length as the dimension that is " r"aggregated, got 2, expected 11" ) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.cube.aggregated_by( "val", self.mock_weighted_agg, weights=wrong_weights ) @@ -323,14 +319,15 @@ def test_fail_weights_wrong_shape(self): r"Weights must either be 1D or have the same shape as the cube, " r"got shape \(42, 1\) for weights, \(4, 11\) for cube" ) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.cube.aggregated_by( "val", self.mock_weighted_agg, weights=wrong_weights ) -class Test_aggregated_by__lazy(tests.IrisTest): - def setUp(self): +class Test_aggregated_by__lazy: + @pytest.fixture(autouse=True) + def _setup(self): self.data = np.arange(44).reshape(4, 11) self.lazydata = as_lazy_data(self.data) self.cube = Cube(self.lazydata) @@ -394,11 +391,11 @@ def test_agg_by_label__lazy(self): long_name="label", units="no_unit", ) - self.assertTrue(res_cube.has_lazy_data()) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - self.assertArrayEqual(res_cube.data, self.label_mean) - self.assertFalse(res_cube.has_lazy_data()) + assert res_cube.has_lazy_data() + assert res_cube.coord("val") == val_coord + assert res_cube.coord("label") == label_coord + _shared_utils.assert_array_equal(res_cube.data, self.label_mean) + assert not res_cube.has_lazy_data() def test_agg_by_val__lazy(self): # Aggregate a cube on a numeric coordinate val where label @@ -412,11 +409,11 @@ def 
test_agg_by_val__lazy(self): label_coord = AuxCoord( np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" ) - self.assertTrue(res_cube.has_lazy_data()) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - self.assertArrayEqual(res_cube.data, self.val_mean) - self.assertFalse(res_cube.has_lazy_data()) + assert res_cube.has_lazy_data() + assert res_cube.coord("val") == val_coord + assert res_cube.coord("label") == label_coord + _shared_utils.assert_array_equal(res_cube.data, self.val_mean) + assert not res_cube.has_lazy_data() def test_single_string_aggregation__lazy(self): aux_coords = [ @@ -429,22 +426,22 @@ def test_single_string_aggregation__lazy(self): ) means = np.array([[4.0, 5.0, 6.0, 7.0], [4.0, 5.0, 6.0, 7.0]]) result = cube.aggregated_by("foo", MEAN) - self.assertTrue(result.has_lazy_data()) - self.assertEqual(result.shape, (2, 4)) - self.assertEqual(result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar")) - self.assertArrayEqual(result.data, means) - self.assertFalse(result.has_lazy_data()) + assert result.has_lazy_data() + assert result.shape == (2, 4) + assert result.coord("bar") == AuxCoord(["a|a", "a"], long_name="bar") + _shared_utils.assert_array_equal(result.data, means) + assert not result.has_lazy_data() def test_1d_weights__lazy(self): - self.assertTrue(self.cube.has_lazy_data()) + assert self.cube.has_lazy_data() cube_agg = self.cube.aggregated_by( "simple_agg", SUM, weights=self.simple_weights ) - self.assertTrue(self.cube.has_lazy_data()) - self.assertTrue(cube_agg.has_lazy_data()) - self.assertEqual(cube_agg.shape, (2, 11)) + assert self.cube.has_lazy_data() + assert cube_agg.has_lazy_data() + assert cube_agg.shape == (2, 11) row_0 = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0] row_1 = [ @@ -460,18 +457,18 @@ def test_1d_weights__lazy(self): 146.0, 150.0, ] - np.testing.assert_array_almost_equal(cube_agg.data, np.array([row_0, row_1])) + _shared_utils.assert_array_almost_equal(cube_agg.data, np.array([row_0, row_1])) def test_2d_weights__lazy(self): - self.assertTrue(self.cube.has_lazy_data()) + assert self.cube.has_lazy_data() cube_agg = self.cube.aggregated_by("val", SUM, weights=self.val_weights) - self.assertTrue(self.cube.has_lazy_data()) - self.assertTrue(cube_agg.has_lazy_data()) + assert self.cube.has_lazy_data() + assert cube_agg.has_lazy_data() - self.assertEqual(cube_agg.shape, (4, 3)) - np.testing.assert_array_almost_equal( + assert cube_agg.shape == (4, 3) + _shared_utils.assert_array_almost_equal( cube_agg.data, np.array( [ @@ -484,21 +481,21 @@ def test_2d_weights__lazy(self): ) def test_returned__lazy(self): - self.assertTrue(self.cube.has_lazy_data()) + assert self.cube.has_lazy_data() output = self.cube.aggregated_by( "simple_agg", SUM, weights=self.simple_weights, returned=True ) - self.assertTrue(self.cube.has_lazy_data()) + assert self.cube.has_lazy_data() - self.assertTrue(isinstance(output, tuple)) - self.assertEqual(len(output), 2) + assert isinstance(output, tuple) + assert len(output) == 2 cube = output[0] - self.assertTrue(isinstance(cube, Cube)) - self.assertTrue(cube.has_lazy_data()) - self.assertEqual(cube.shape, (2, 11)) + assert isinstance(cube, Cube) + assert cube.has_lazy_data() + assert cube.shape == (2, 11) row_0 = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0] row_1 = [ 110.0, @@ -513,11 +510,11 @@ def test_returned__lazy(self): 146.0, 150.0, ] - np.testing.assert_array_almost_equal(cube.data, np.array([row_0, row_1])) + 
_shared_utils.assert_array_almost_equal(cube.data, np.array([row_0, row_1])) weights = output[1] - self.assertEqual(weights.shape, (2, 11)) - np.testing.assert_array_almost_equal( + assert weights.shape == (2, 11) + _shared_utils.assert_array_almost_equal( weights, np.array( [ @@ -528,8 +525,9 @@ def test_returned__lazy(self): ) -class Test_aggregated_by__climatology(tests.IrisTest): - def setUp(self): +class Test_aggregated_by__climatology: + @pytest.fixture(autouse=True) + def _setup(self): self.data = np.arange(100).reshape(20, 5) self.aggregator = iris.analysis.MEAN @@ -626,15 +624,17 @@ def test_basic(self): result = self.get_result() aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) - self.assertTrue(aligned_coord.climatological) - self.assertIn(aligned_coord, result.dim_coords) + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(2)) + _shared_utils.assert_array_equal( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + assert aligned_coord.climatological + assert aligned_coord in result.dim_coords categorised_coord = result.coord("cat1") - self.assertArrayEqual(categorised_coord.points, np.arange(2)) - self.assertIsNone(categorised_coord.bounds) - self.assertFalse(categorised_coord.climatological) + _shared_utils.assert_array_equal(categorised_coord.points, np.arange(2)) + assert categorised_coord.bounds is None + assert not categorised_coord.climatological def test_2d_other_coord(self): """Check that we can handle aggregation applying to a 2d AuxCoord that @@ -643,19 +643,21 @@ def test_2d_other_coord(self): result = self.get_result(partially_aligned=True) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) - self.assertTrue(aligned_coord.climatological) + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(2)) + _shared_utils.assert_array_equal( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + assert aligned_coord.climatological part_aligned_coord = result.coord("part_aligned") - self.assertArrayEqual( + _shared_utils.assert_array_equal( part_aligned_coord.points, np.arange(46, 56).reshape(2, 5) ) - self.assertArrayEqual( + _shared_utils.assert_array_equal( part_aligned_coord.bounds, np.array([np.arange(1, 11), np.arange(91, 101)]).T.reshape(2, 5, 2), ) - self.assertFalse(part_aligned_coord.climatological) + assert not part_aligned_coord.climatological def test_2d_timelike_other_coord(self): """Check that we can handle aggregation applying to a 2d AuxCoord that @@ -666,64 +668,74 @@ def test_2d_timelike_other_coord(self): ) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) - self.assertTrue(aligned_coord.climatological) + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(2)) + _shared_utils.assert_array_equal( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + assert aligned_coord.climatological part_aligned_coord = result.coord("part_aligned") - self.assertArrayEqual(part_aligned_coord.points, np.arange(1, 11).reshape(2, 5)) - self.assertArrayEqual( + _shared_utils.assert_array_equal( + part_aligned_coord.points, np.arange(1, 11).reshape(2, 5) + ) + _shared_utils.assert_array_equal( part_aligned_coord.bounds, np.array([np.arange(1, 11), 
np.arange(91, 101)]).T.reshape(2, 5, 2), ) - self.assertTrue(part_aligned_coord.climatological) + assert part_aligned_coord.climatological def test_transposed(self): """Check that we can handle the axis of aggregation being a different one.""" result = self.get_result(transpose=True) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(2)) - self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) - self.assertTrue(aligned_coord.climatological) + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(2)) + _shared_utils.assert_array_equal( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + assert aligned_coord.climatological categorised_coord = result.coord("cat1") - self.assertArrayEqual(categorised_coord.points, np.arange(2)) - self.assertIsNone(categorised_coord.bounds) - self.assertFalse(categorised_coord.climatological) + _shared_utils.assert_array_equal(categorised_coord.points, np.arange(2)) + assert categorised_coord.bounds is None + assert not categorised_coord.climatological def test_bounded(self): """Check that we handle bounds correctly.""" result = self.get_result(bounds=True) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, [-0.5, 0.5]) - self.assertArrayEqual( + _shared_utils.assert_array_equal(aligned_coord.points, [-0.5, 0.5]) + _shared_utils.assert_array_equal( aligned_coord.bounds, np.array([[-0.5, 18.5], [0.5, 19.5]]) ) - self.assertTrue(aligned_coord.climatological) + assert aligned_coord.climatological def test_multiple_agg_coords(self): """Check that we can aggregate on multiple coords on the same axis.""" result = self.get_result(second_categorised=True) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(10)) - self.assertArrayEqual( + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(10)) + _shared_utils.assert_array_equal( aligned_coord.bounds, np.array([np.arange(10), np.arange(10, 20)]).T, ) - self.assertTrue(aligned_coord.climatological) + assert aligned_coord.climatological categorised_coord1 = result.coord("cat1") - self.assertArrayEqual(categorised_coord1.points, np.tile(np.arange(2), 5)) - self.assertIsNone(categorised_coord1.bounds) - self.assertFalse(categorised_coord1.climatological) + _shared_utils.assert_array_equal( + categorised_coord1.points, np.tile(np.arange(2), 5) + ) + assert categorised_coord1.bounds is None + assert not categorised_coord1.climatological categorised_coord2 = result.coord("cat2") - self.assertArrayEqual(categorised_coord2.points, np.tile(np.arange(5), 2)) - self.assertIsNone(categorised_coord2.bounds) - self.assertFalse(categorised_coord2.climatological) + _shared_utils.assert_array_equal( + categorised_coord2.points, np.tile(np.arange(5), 2) + ) + assert categorised_coord2.bounds is None + assert not categorised_coord2.climatological def test_non_climatological_units(self): """Check that the failure to set the climatological flag on an incompatible @@ -732,9 +744,11 @@ def test_non_climatological_units(self): result = self.get_result(invalid_units=True) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(9, 11)) - self.assertArrayEqual(aligned_coord.bounds, np.array([[0, 18], [1, 19]])) - self.assertFalse(aligned_coord.climatological) + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(9, 11)) + _shared_utils.assert_array_equal( + aligned_coord.bounds, np.array([[0, 18], [1, 19]]) + ) + assert not 
aligned_coord.climatological def test_clim_in_clim_op(self): """Check the least complicated version works (set climatological, set @@ -744,16 +758,16 @@ def test_clim_in_clim_op(self): result = self.get_result(bounds=True, already_climatological=True) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, [-0.5, 0.5]) - self.assertArrayEqual( + _shared_utils.assert_array_equal(aligned_coord.points, [-0.5, 0.5]) + _shared_utils.assert_array_equal( aligned_coord.bounds, np.array([[-0.5, 18.5], [0.5, 19.5]]) ) - self.assertTrue(aligned_coord.climatological) + assert aligned_coord.climatological categorised_coord = result.coord("cat1") - self.assertArrayEqual(categorised_coord.points, np.arange(2)) - self.assertIsNone(categorised_coord.bounds) - self.assertFalse(categorised_coord.climatological) + _shared_utils.assert_array_equal(categorised_coord.points, np.arange(2)) + assert categorised_coord.bounds is None + assert not categorised_coord.climatological def test_clim_in_no_clim_op(self): """Check the least complicated version works (set climatological, set @@ -765,20 +779,21 @@ def test_clim_in_no_clim_op(self): ) aligned_coord = result.coord("aligned") - self.assertArrayEqual(aligned_coord.points, np.arange(9, 11)) - self.assertArrayEqual( + _shared_utils.assert_array_equal(aligned_coord.points, np.arange(9, 11)) + _shared_utils.assert_array_equal( aligned_coord.bounds, np.array([[-0.5, 18.5], [0.5, 19.5]]) ) - self.assertTrue(aligned_coord.climatological) + assert aligned_coord.climatological categorised_coord = result.coord("cat1") - self.assertArrayEqual(categorised_coord.points, np.arange(2)) - self.assertIsNone(categorised_coord.bounds) - self.assertFalse(categorised_coord.climatological) + _shared_utils.assert_array_equal(categorised_coord.points, np.arange(2)) + assert categorised_coord.bounds is None + assert not categorised_coord.climatological -class Test_aggregated_by__derived(tests.IrisTest): - def setUp(self): +class Test_aggregated_by__derived: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = realistic_4d()[:, :10, :6, :8] self.time_cat_coord = AuxCoord([0, 0, 1, 1, 2, 2], long_name="time_cat") self.cube.add_aux_coord(self.time_cat_coord, 0) @@ -818,7 +833,3 @@ def test_ungrouped_dim(self): assert len(result.aux_factories) == 1 altitude = result.coord("altitude") assert altitude == self.cube.coord("altitude") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/cube/test_Cube__operators.py b/lib/iris/tests/unit/cube/test_Cube__operators.py index 0afd5a9d70..98b5963e3a 100644 --- a/lib/iris/tests/unit/cube/test_Cube__operators.py +++ b/lib/iris/tests/unit/cube/test_Cube__operators.py @@ -4,22 +4,20 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.cube.Cube` class operators.""" -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - import operator import dask.array as da import numpy as np import numpy.ma as ma +import pytest import iris from iris._lazy_data import as_lazy_data from iris.coords import DimCoord +from iris.tests import _shared_utils -class Test_lazy_maths(tests.IrisTest): +class Test_lazy_maths: def build_lazy_cube(self, points, dtype=np.float64, bounds=None, nx=10): data = np.arange(len(points) * nx, dtype=dtype) + 1 # Just avoid 0. 
data = data.reshape(len(points), nx) @@ -32,23 +30,23 @@ def build_lazy_cube(self, points, dtype=np.float64, bounds=None, nx=10): return cube def check_common(self, base_cube, result): - self.assertTrue(base_cube.has_lazy_data()) - self.assertTrue(result.has_lazy_data()) - self.assertIsInstance(result.lazy_data(), da.core.Array) + assert base_cube.has_lazy_data() + assert result.has_lazy_data() + assert isinstance(result.lazy_data(), da.core.Array) def cube_cube_math_op(self, c1, math_op): result = math_op(c1, c1) self.check_common(c1, result) expected = math_op(c1.data, c1.data) - self.assertArrayAlmostEqual(result.data, expected) + _shared_utils.assert_array_almost_equal(result.data, expected) def cube_scalar_math_op(self, c1, scalar, math_op, commutative=True): result = math_op(c1, scalar) if commutative: - self.assertEqual(math_op(c1, scalar), math_op(scalar, c1)) + assert math_op(c1, scalar) == math_op(scalar, c1) self.check_common(c1, result) expected = math_op(c1.data, scalar) - self.assertArrayAlmostEqual(result.data, expected) + _shared_utils.assert_array_almost_equal(result.data, expected) def test_add_cubes__float(self): c1 = self.build_lazy_cube([1, 2]) @@ -139,12 +137,13 @@ def test_div_scalar__int(self): self.cube_scalar_math_op(c1, scalar, op, commutative=False) -class Test_lazy_maths__scalar_cube(tests.IrisTest): +class Test_lazy_maths__scalar_cube: def build_lazy_cube(self, value, dtype=np.float64): data = as_lazy_data(np.array(value, dtype=dtype)) return iris.cube.Cube(data, standard_name="air_temperature", units="K") - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.c1 = self.build_lazy_cube(3) self.c2 = self.build_lazy_cube(4) self.c3 = self.build_lazy_cube(3, dtype=np.int64) @@ -153,8 +152,8 @@ def setUp(self): def check_common(self, c1, c2, math_op): cube = math_op(c1, c2) data = cube.data - self.assertTrue(isinstance(data, np.ndarray)) - self.assertEqual(data.shape, ()) + assert isinstance(data, np.ndarray) + assert data.shape == () def test_add_scalar__int(self): c3, c4, op = self.c3, 5, operator.add @@ -221,7 +220,7 @@ def test_div_cubes__float(self): self.check_common(c1, c2, op) -class Test_lazy_maths__masked_data(tests.IrisTest): +class Test_lazy_maths__masked_data: def build_lazy_cube(self, dtype=np.float64): data = ma.array( [[1.0, 1.0], [1.0, 100000.0]], mask=[[0, 0], [0, 1]], dtype=dtype @@ -238,14 +237,10 @@ def test_subtract__float(self): cube_a = self.build_lazy_cube() cube_b = self.build_lazy_cube() cube_c = cube_a - cube_b - self.assertTrue(ma.isMaskedArray(cube_c.data)) + assert ma.isMaskedArray(cube_c.data) def test_subtract__int(self): cube_a = self.build_lazy_cube(dtype=np.int64) cube_b = self.build_lazy_cube(dtype=np.int64) cube_c = cube_a - cube_b - self.assertTrue(ma.isMaskedArray(cube_c.data)) - - -if __name__ == "__main__": - tests.main() + assert ma.isMaskedArray(cube_c.data) diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index 81e6c8cedf..c5982fc475 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -3,63 +3,3 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.fileformats` package.""" - -import iris.tests as tests # isort:skip - - -class TestField(tests.IrisTest): - def _test_for_coord( - self, field, convert, coord_predicate, expected_points, expected_bounds - ): - ( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(field) - - # Check for one and only one matching coordinate. - coords_and_dims = dim_coords_and_dims + aux_coords_and_dims - matching_coords = [ - coord for coord, _ in coords_and_dims if coord_predicate(coord) - ] - self.assertEqual(len(matching_coords), 1, str(matching_coords)) - coord = matching_coords[0] - - # Check points and bounds. - if expected_points is not None: - self.assertArrayEqual(coord.points, expected_points) - - if expected_bounds is None: - self.assertIsNone(coord.bounds) - else: - self.assertArrayEqual(coord.bounds, expected_bounds) - - def assertCoordsAndDimsListsMatch( - self, coords_and_dims_got, coords_and_dims_expected - ): - """Check that coords_and_dims lists are equivalent. - - The arguments are lists of pairs of (coordinate, dimensions). - The elements are compared one-to-one, by coordinate name (so the order - of the lists is _not_ significant). - It also checks that the coordinate types (DimCoord/AuxCoord) match. - - """ - - def sorted_by_coordname(list): - return sorted(list, key=lambda item: item[0].name()) - - coords_and_dims_got = sorted_by_coordname(coords_and_dims_got) - coords_and_dims_expected = sorted_by_coordname(coords_and_dims_expected) - self.assertEqual(coords_and_dims_got, coords_and_dims_expected) - # Also check coordinate type equivalences (as Coord.__eq__ does not). - self.assertEqual( - [type(coord) for coord, dims in coords_and_dims_got], - [type(coord) for coord, dims in coords_and_dims_expected], - ) diff --git a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py index b67e02ec06..2f06c914ca 100644 --- a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py +++ b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py @@ -4,49 +4,22 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.abf.ABFField` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.abf import ABFField -class MethodCounter: - def __init__(self, method_name): - self.method_name = method_name - self.count = 0 - - def __enter__(self): - self.orig_method = getattr(ABFField, self.method_name) - - def new_method(*args, **kwargs): - self.count += 1 - self.orig_method(*args, **kwargs) - - setattr(ABFField, self.method_name, new_method) - return self - - def __exit__(self, exc_type, exc_value, traceback): - setattr(ABFField, self.method_name, self.orig_method) - return False - - -class Test_data(tests.IrisTest): - def test_single_read(self): +class Test_data: + def test_single_read(self, mocker): path = "0000000000000000jan00000" field = ABFField(path) - with mock.patch("iris.fileformats.abf.np.fromfile") as fromfile: - with MethodCounter("__getattr__") as getattr: - with MethodCounter("_read") as read: - field.data + # Fake the file fetch operation + fromfile = mocker.patch("iris.fileformats.abf.np.fromfile") + # Spy on the '_read' operation + read = mocker.spy(field, "_read") - fromfile.assert_called_once_with(path, dtype=">u1") - self.assertEqual(getattr.count, 1) - self.assertEqual(read.count, 1) + # do the access + field.data - -if __name__ == "__main__": - tests.main() + # Check that _read was called, and np.fromfile. + fromfile.assert_called_once_with(path, dtype=">u1") + assert read.call_count == 1 diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py index 25f64319af..3724a2f628 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py @@ -6,6 +6,8 @@ from unittest.mock import MagicMock +import pytest + from iris.fileformats.cf import ( CFAuxiliaryCoordinateVariable, CFCoordinateVariable, @@ -16,14 +18,11 @@ CFUGridMeshVariable, ) -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -class Tests(tests.IrisTest): +class Tests: # TODO: unit tests for existing functionality pre 2021-03-11. 
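# An illustrative sketch of the setUp -> autouse-fixture conversion applied to
# the test classes in this patch; the class and attribute names below are
# hypothetical, not taken from Iris.
import pytest


class TestFixtureSketch:
    @pytest.fixture(autouse=True)
    def _setup(self):
        # Runs before every test in the class, like unittest.TestCase.setUp().
        self.group = {"key": 1}

    def test_group(self):
        assert self.group["key"] == 1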
- def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cf_group = CFGroup() def test_non_data_names(self): @@ -44,36 +43,37 @@ def test_non_data_names(self): expected_names = [var.cf_name for var in (aux_var, coord_var, coord_var2)] expected = set(expected_names) - self.assertEqual(expected, self.cf_group.non_data_variable_names) + assert self.cf_group.non_data_variable_names == expected -class Ugrid(tests.IrisTest): +class TestUgrid: """Separate class to test UGRID functionality.""" - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cf_group = CFGroup() def test_inherited(self): coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") self.cf_group[coord_var.cf_name] = coord_var - self.assertEqual(coord_var, self.cf_group.coordinates[coord_var.cf_name]) + assert self.cf_group.coordinates[coord_var.cf_name] == coord_var def test_connectivities(self): conn_var = MagicMock(spec=CFUGridConnectivityVariable, cf_name="conn_var") self.cf_group[conn_var.cf_name] = conn_var - self.assertEqual(conn_var, self.cf_group.connectivities[conn_var.cf_name]) + assert self.cf_group.connectivities[conn_var.cf_name] == conn_var def test_ugrid_coords(self): coord_var = MagicMock( spec=CFUGridAuxiliaryCoordinateVariable, cf_name="coord_var" ) self.cf_group[coord_var.cf_name] = coord_var - self.assertEqual(coord_var, self.cf_group.ugrid_coords[coord_var.cf_name]) + assert self.cf_group.ugrid_coords[coord_var.cf_name] == coord_var def test_meshes(self): mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") self.cf_group[mesh_var.cf_name] = mesh_var - self.assertEqual(mesh_var, self.cf_group.meshes[mesh_var.cf_name]) + assert self.cf_group.meshes[mesh_var.cf_name] == mesh_var def test_non_data_names(self): data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") @@ -108,4 +108,4 @@ def test_non_data_names(self): ) ] expected = set(expected_names) - self.assertEqual(expected, self.cf_group.non_data_variable_names) + assert self.cf_group.non_data_variable_names == expected diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index f84ed5766b..7f37eb9f24 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -4,13 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.cf.CFReader` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from unittest import mock import numpy as np +import pytest from iris.fileformats.cf import ( CFCoordinateVariable, @@ -65,37 +62,37 @@ def netcdf_variable( standard_name=standard_name, **{name: None for name in ugrid_identities}, ) - if bounds is None: - del ncvar.bounds return ncvar -class Test_translate__global_attributes(tests.IrisTest): - def setUp(self): +class Test_translate__global_attributes: + @pytest.fixture(autouse=True) + def _setup(self, mocker): ncvar = netcdf_variable("ncvar", "height", np.float64) ncattrs = mock.Mock(return_value=["dimensions"]) getncattr = mock.Mock(return_value="something something_else") - self.dataset = mock.Mock( + dataset = mock.Mock( file_format="NetCDF4", variables={"ncvar": ncvar}, ncattrs=ncattrs, getncattr=getncattr, ) - - def test_create_global_attributes(self): - with mock.patch( + mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", - return_value=self.dataset, - ): - global_attrs = CFReader("dummy").cf_group.global_attributes - self.assertEqual(global_attrs["dimensions"], "something something_else") + return_value=dataset, + ) + def test_create_global_attributes(self, mocker): + global_attrs = CFReader("dummy").cf_group.global_attributes + assert global_attrs["dimensions"] == "something something_else" -class Test_translate__formula_terms(tests.IrisTest): - def setUp(self): - self.delta = netcdf_variable("delta", "height", np.float64) + +class Test_translate__formula_terms: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.delta = netcdf_variable("delta", "height", np.float64, bounds="delta_bnds") self.delta_bnds = netcdf_variable("delta_bnds", "height bnds", np.float64) - self.sigma = netcdf_variable("sigma", "height", np.float64) + self.sigma = netcdf_variable("sigma", "height", np.float64, bounds="sigma_bnds") self.sigma_bnds = netcdf_variable("sigma_bnds", "height bnds", np.float64) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" @@ -141,64 +138,53 @@ def setUp(self): file_format="NetCDF4", variables=self.variables, ncattrs=ncattrs ) # Restrict the CFReader functionality to only performing translations. - build_patch = mock.patch("iris.fileformats.cf.CFReader._build_cf_groups") - reset_patch = mock.patch("iris.fileformats.cf.CFReader._reset") - build_patch.start() - reset_patch.start() - self.addCleanup(build_patch.stop) - self.addCleanup(reset_patch.stop) - - def test_create_formula_terms(self): - with mock.patch( + mocker.patch("iris.fileformats.cf.CFReader._build_cf_groups") + mocker.patch("iris.fileformats.cf.CFReader._reset") + mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", return_value=self.dataset, - ): - cf_group = CFReader("dummy").cf_group - self.assertEqual(len(cf_group), len(self.variables)) - # Check there is a singular data variable. - group = cf_group.data_variables - self.assertEqual(len(group), 1) - self.assertEqual(list(group.keys()), ["temp"]) - self.assertIs(group["temp"].cf_data, self.temp) - # Check there are three coordinates. - group = cf_group.coordinates - self.assertEqual(len(group), 3) - coordinates = ["height", "lat", "lon"] - self.assertEqual(set(group.keys()), set(coordinates)) - for name in coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check there are three auxiliary coordinates. 
- group = cf_group.auxiliary_coordinates - self.assertEqual(len(group), 3) - aux_coordinates = ["delta", "sigma", "orography"] - self.assertEqual(set(group.keys()), set(aux_coordinates)) - for name in aux_coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check there are three bounds. - group = cf_group.bounds - self.assertEqual(len(group), 3) - bounds = ["height_bnds", "delta_bnds", "sigma_bnds"] - self.assertEqual(set(group.keys()), set(bounds)) - for name in bounds: - self.assertEqual(group[name].cf_data, getattr(self, name)) - # Check the formula terms contains all expected terms - formula_terms = cf_group.formula_terms - expected_keys = ["delta", "sigma", "orography", "delta_bnds", "sigma_bnds"] - expected_group = { - k: v - for k, v in dict( - **cf_group.auxiliary_coordinates, **cf_group.bounds - ).items() - if k in expected_keys - } - self.assertEqual(set(expected_group.items()), set(formula_terms.items())) - + ) -class Test_build_cf_groups__formula_terms(tests.IrisTest): - def setUp(self): - self.delta = netcdf_variable("delta", "height", np.float64) + def test_create_formula_terms(self, mocker): + cf_group = CFReader("dummy").cf_group + assert len(cf_group) == len(self.variables) + # Check there is a singular data variable. + group = cf_group.data_variables + assert len(group) == 1 + assert list(group.keys()) == ["temp"] + assert group["temp"].cf_data is self.temp + # Check there are three coordinates. + group = cf_group.coordinates + assert len(group) == 3 + coordinates = ["height", "lat", "lon"] + assert set(group.keys()) == set(coordinates) + for name in coordinates: + assert group[name].cf_data is getattr(self, name) + # Check there are three auxiliary coordinates. + group = cf_group.auxiliary_coordinates + assert len(group) == 3 + aux_coordinates = ["delta", "sigma", "orography"] + assert set(group.keys()) == set(aux_coordinates) + for name in aux_coordinates: + assert group[name].cf_data is getattr(self, name) + # Check all the auxiliary coordinates are formula terms. + formula_terms = cf_group.formula_terms + assert set(group.items()) == set(formula_terms.items()) + # Check there are three bounds. + group = cf_group.bounds + assert len(group) == 3 + bounds = ["height_bnds", "delta_bnds", "sigma_bnds"] + assert set(group.keys()) == set(bounds) + for name in bounds: + assert group[name].cf_data == getattr(self, name) + + +class Test_build_cf_groups__formula_terms: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.delta = netcdf_variable("delta", "height", np.float64, bounds="delta_bnds") self.delta_bnds = netcdf_variable("delta_bnds", "height bnds", np.float64) - self.sigma = netcdf_variable("sigma", "height", np.float64) + self.sigma = netcdf_variable("sigma", "height", np.float64, bounds="sigma_bnds") self.sigma_bnds = netcdf_variable("sigma_bnds", "height bnds", np.float64) self.orography = netcdf_variable("orography", "lat lon", np.float64) formula_terms = "a: delta b: sigma orog: orography" @@ -249,183 +235,144 @@ def setUp(self): ) # Restrict the CFReader functionality to only performing translations # and building first level cf-groups for variables. 
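# An illustrative sketch of the mock.patch()/start()/addCleanup() to
# pytest-mock conversion used in these setUp rewrites. It assumes the
# pytest-mock plugin (which provides the `mocker` fixture); the patch and spy
# targets in os.path are stand-ins, not the Iris targets.
import os.path


def test_mocker_patch_sketch(mocker):
    # mocker.patch is undone automatically at test teardown, so no explicit
    # stop() or addCleanup() bookkeeping is needed.
    fake_exists = mocker.patch("os.path.exists", return_value=True)
    assert os.path.exists("/no/such/path")
    fake_exists.assert_called_once_with("/no/such/path")


def test_mocker_spy_sketch(mocker):
    # mocker.spy wraps a real callable so calls can be counted without
    # changing its behaviour (as done for ABFField._read in this patch).
    spy = mocker.spy(os.path, "basename")
    assert os.path.basename("some/dir/file.txt") == "file.txt"
    assert spy.call_count == 1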
- patcher = mock.patch("iris.fileformats.cf.CFReader._reset") - patcher.start() - self.addCleanup(patcher.stop) - - def test_associate_formula_terms_with_data_variable(self): - with mock.patch( + mocker.patch("iris.fileformats.cf.CFReader._reset") + mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", return_value=self.dataset, - ): - cf_group = CFReader("dummy").cf_group - self.assertEqual(len(cf_group), len(self.variables)) - # Check the cf-group associated with the data variable. - temp_cf_group = cf_group["temp"].cf_group - # Check the data variable is associated with eight variables. - self.assertEqual(len(temp_cf_group), 8) - # Check there are three coordinates. - group = temp_cf_group.coordinates - self.assertEqual(len(group), 3) - coordinates = ["height", "lat", "lon"] - self.assertEqual(set(group.keys()), set(coordinates)) - for name in coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check the height coordinate is bounded. - group = group["height"].cf_group - self.assertEqual(len(group.bounds), 1) - self.assertIn("height_bnds", group.bounds) - self.assertIs(group["height_bnds"].cf_data, self.height_bnds) - # Check there are five auxiliary coordinates. - group = temp_cf_group.auxiliary_coordinates - self.assertEqual(len(group), 5) - aux_coordinates = ["delta", "sigma", "orography", "x", "y"] - self.assertEqual(set(group.keys()), set(aux_coordinates)) - formula_terms = cf_group.formula_terms - for name in aux_coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check the terms by root. - for name, term in zip(aux_coordinates, ["a", "b", "orog"]): - self.assertEqual( - formula_terms[name].cf_terms_by_root, dict(height=term) - ) - # Check the bounded auxiliary coordinates. - for name, name_bnds in zip( - ["delta", "sigma"], ["delta_bnds", "sigma_bnds"] - ): - aux_coord_group = group[name].cf_group - self.assertEqual(len(aux_coord_group.bounds), 1) - self.assertIn(name_bnds, aux_coord_group.bounds) - self.assertIs( - aux_coord_group[name_bnds].cf_data, - getattr(self, name_bnds), - ) - # Check the formula terms contains all expected terms - expected_keys = ["delta", "sigma", "orography", "delta_bnds", "sigma_bnds"] - expected_group = { - k: v - for k, v in dict( - **cf_group.auxiliary_coordinates, **cf_group.bounds - ).items() - if k in expected_keys - } - self.assertEqual(set(expected_group.items()), set(formula_terms.items())) + ) + + def test_associate_formula_terms_with_data_variable(self, mocker): + cf_group = CFReader("dummy").cf_group + assert len(cf_group) == len(self.variables) + # Check the cf-group associated with the data variable. + temp_cf_group = cf_group["temp"].cf_group + # Check the data variable is associated with eight variables. + assert len(temp_cf_group) == 8 + # Check there are three coordinates. + group = temp_cf_group.coordinates + assert len(group) == 3 + coordinates = ["height", "lat", "lon"] + assert set(group.keys()) == set(coordinates) + for name in coordinates: + assert group[name].cf_data is getattr(self, name) + # Check the height coordinate is bounded. + group = group["height"].cf_group + assert len(group.bounds) == 1 + assert "height_bnds" in group.bounds + assert group["height_bnds"].cf_data is self.height_bnds + # Check there are five auxiliary coordinates. 
+ group = temp_cf_group.auxiliary_coordinates + assert len(group) == 5 + aux_coordinates = ["delta", "sigma", "orography", "x", "y"] + assert set(group.keys()) == set(aux_coordinates) + for name in aux_coordinates: + assert group[name].cf_data is getattr(self, name) + # Check all the auxiliary coordinates are formula terms. + formula_terms = cf_group.formula_terms + assert set(formula_terms.items()).issubset(list(group.items())) + # Check the terms by root. + for name, term in zip(aux_coordinates, ["a", "b", "orog"]): + assert formula_terms[name].cf_terms_by_root == dict(height=term) + # Check the bounded auxiliary coordinates. + for name, name_bnds in zip(["delta", "sigma"], ["delta_bnds", "sigma_bnds"]): + aux_coord_group = group[name].cf_group + assert len(aux_coord_group.bounds) == 1 + assert name_bnds in aux_coord_group.bounds + assert aux_coord_group[name_bnds].cf_data is getattr(self, name_bnds) def test_promote_reference(self): - with mock.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", - return_value=self.dataset, - ): - cf_group = CFReader("dummy").cf_group - self.assertEqual(len(cf_group), len(self.variables)) - # Check the number of data variables. - self.assertEqual(len(cf_group.data_variables), 1) - self.assertEqual(list(cf_group.data_variables.keys()), ["temp"]) - # Check the number of promoted variables. - self.assertEqual(len(cf_group.promoted), 1) - self.assertEqual(list(cf_group.promoted.keys()), ["orography"]) - # Check the promoted variable dependencies. - group = cf_group.promoted["orography"].cf_group.coordinates - self.assertEqual(len(group), 2) - coordinates = ("lat", "lon") - self.assertEqual(set(group.keys()), set(coordinates)) - for name in coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) + cf_group = CFReader("dummy").cf_group + assert len(cf_group) == len(self.variables) + # Check the number of data variables. + assert len(cf_group.data_variables) == 1 + assert list(cf_group.data_variables.keys()) == ["temp"] + # Check the number of promoted variables. + assert len(cf_group.promoted) == 1 + assert list(cf_group.promoted.keys()) == ["orography"] + # Check the promoted variable dependencies. 
+ group = cf_group.promoted["orography"].cf_group.coordinates + assert len(group) == 2 + coordinates = ("lat", "lon") + assert set(group.keys()) == set(coordinates) + for name in coordinates: + assert group[name].cf_data == getattr(self, name) def test_formula_terms_ignore(self): self.orography.dimensions = ["lat", "wibble"] - with ( - mock.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", - return_value=self.dataset, - ), - mock.patch("warnings.warn") as warn, - ): + with pytest.warns(match="Ignoring formula terms variable"): cf_group = CFReader("dummy").cf_group group = cf_group.promoted - self.assertEqual(list(group.keys()), ["orography"]) - self.assertIs(group["orography"].cf_data, self.orography) - self.assertEqual(warn.call_count, 1) + assert list(group.keys()) == ["orography"] + assert group["orography"].cf_data == self.orography def test_auxiliary_ignore(self): self.x.dimensions = ["lat", "wibble"] - with ( - mock.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", - return_value=self.dataset, - ), - mock.patch("warnings.warn") as warn, - ): + with pytest.warns(match=r"Ignoring variable x"): cf_group = CFReader("dummy").cf_group promoted = ["x", "orography"] group = cf_group.promoted - self.assertEqual(set(group.keys()), set(promoted)) + assert set(group.keys()) == set(promoted) for name in promoted: - self.assertIs(group[name].cf_data, getattr(self, name)) - self.assertEqual(warn.call_count, 1) + assert group[name].cf_data == getattr(self, name) def test_promoted_auxiliary_ignore(self): self.wibble = netcdf_variable("wibble", "lat wibble", np.float64) self.variables["wibble"] = self.wibble self.orography.coordinates = "wibble" - with ( - mock.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", - return_value=self.dataset, - ), - mock.patch("warnings.warn") as warn, - ): + with pytest.warns(match="Ignoring variable wibble") as warns: cf_group = CFReader("dummy").cf_group.promoted promoted = ["wibble", "orography"] - self.assertEqual(set(cf_group.keys()), set(promoted)) + assert set(cf_group.keys()) == set(promoted) for name in promoted: - self.assertIs(cf_group[name].cf_data, getattr(self, name)) - self.assertEqual(warn.call_count, 2) + assert cf_group[name].cf_data == getattr(self, name) + # we should have got 2 warnings + assert len(warns.list) == 2 -class Test_build_cf_groups__ugrid(tests.IrisTest): - @classmethod - def setUpClass(cls): +class Test_build_cf_groups__ugrid: + @pytest.fixture(autouse=True) + def _setup_class(self, mocker): # Replicating syntax from test_CFReader.Test_build_cf_groups__formula_terms. 
- cls.mesh = netcdf_variable("mesh", "", int) - cls.node_x = netcdf_variable("node_x", "node", float) - cls.node_y = netcdf_variable("node_y", "node", float) - cls.face_x = netcdf_variable("face_x", "face", float) - cls.face_y = netcdf_variable("face_y", "face", float) - cls.face_nodes = netcdf_variable("face_nodes", "face vertex", int) - cls.levels = netcdf_variable("levels", "levels", int) - cls.data = netcdf_variable( + self.mesh = netcdf_variable("mesh", "", int) + self.node_x = netcdf_variable("node_x", "node", float) + self.node_y = netcdf_variable("node_y", "node", float) + self.face_x = netcdf_variable("face_x", "face", float) + self.face_y = netcdf_variable("face_y", "face", float) + self.face_nodes = netcdf_variable("face_nodes", "face vertex", int) + self.levels = netcdf_variable("levels", "levels", int) + self.data = netcdf_variable( "data", "levels face", float, coordinates="face_x face_y" ) # Add necessary attributes for mesh recognition. - cls.mesh.cf_role = "mesh_topology" - cls.mesh.node_coordinates = "node_x node_y" - cls.mesh.face_coordinates = "face_x face_y" - cls.mesh.face_node_connectivity = "face_nodes" - cls.face_nodes.cf_role = "face_node_connectivity" - cls.data.mesh = "mesh" + self.mesh.cf_role = "mesh_topology" + self.mesh.node_coordinates = "node_x node_y" + self.mesh.face_coordinates = "face_x face_y" + self.mesh.face_node_connectivity = "face_nodes" + self.face_nodes.cf_role = "face_node_connectivity" + self.data.mesh = "mesh" - cls.variables = dict( - mesh=cls.mesh, - node_x=cls.node_x, - node_y=cls.node_y, - face_x=cls.face_x, - face_y=cls.face_y, - face_nodes=cls.face_nodes, - levels=cls.levels, - data=cls.data, + self.variables = dict( + mesh=self.mesh, + node_x=self.node_x, + node_y=self.node_y, + face_x=self.face_x, + face_y=self.face_y, + face_nodes=self.face_nodes, + levels=self.levels, + data=self.data, ) ncattrs = mock.Mock(return_value=[]) - cls.dataset = mock.Mock( - file_format="NetCDF4", variables=cls.variables, ncattrs=ncattrs + self.dataset = mock.Mock( + file_format="NetCDF4", variables=self.variables, ncattrs=ncattrs ) - def setUp(self): + # @pytest.fixture(autouse=True) + # def _setup(self, mocker): # Restrict the CFReader functionality to only performing # translations and building first level cf-groups for variables. 
- self.patch("iris.fileformats.cf.CFReader._reset") - self.patch( + mocker.patch("iris.fileformats.cf.CFReader._reset") + mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", return_value=self.dataset, ) @@ -438,17 +385,17 @@ def test_inherited(self): [CFDataVariable("data", self.data), "data_variables"], ): expected = {expected_var.cf_name: expected_var} - self.assertDictEqual(expected, getattr(self.cf_group, collection)) + assert getattr(self.cf_group, collection) == expected def test_connectivities(self): expected_var = CFUGridConnectivityVariable("face_nodes", self.face_nodes) expected = {expected_var.cf_name: expected_var} - self.assertDictEqual(expected, self.cf_group.connectivities) + assert self.cf_group.connectivities == expected def test_mesh(self): expected_var = CFUGridMeshVariable("mesh", self.mesh) expected = {expected_var.cf_name: expected_var} - self.assertDictEqual(expected, self.cf_group.meshes) + assert self.cf_group.meshes == expected def test_ugrid_coords(self): names = [f"{loc}_{ax}" for loc in ("node", "face") for ax in ("x", "y")] @@ -456,11 +403,7 @@ def test_ugrid_coords(self): name: CFUGridAuxiliaryCoordinateVariable(name, getattr(self, name)) for name in names } - self.assertDictEqual(expected, self.cf_group.ugrid_coords) + assert self.cf_group.ugrid_coords == expected def test_is_cf_ugrid_group(self): - self.assertIsInstance(self.cf_group, CFGroup) - - -if __name__ == "__main__": - tests.main() + assert isinstance(self.cf_group, CFGroup) diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py index ce4d6d6217..679f74c51d 100644 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py @@ -4,65 +4,59 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.dot._dot_path`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import os.path import subprocess -from unittest import mock + +import pytest from iris.fileformats.dot import _DOT_EXECUTABLE_PATH, _dot_path -class Test(tests.IrisTest): - def setUp(self): +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Because _dot_path is triggered by the initial import we # reset the caching status to allow us to see what happens # under different circumstances. - self.patch("iris.fileformats.dot._DOT_CHECKED", new=False) + mocker.patch("iris.fileformats.dot._DOT_CHECKED", False) # Also patch the private path variable to the existing value (i.e. no # change), and restore it after each test: As these tests modify it, # that can potentially break subsequent 'normal' behaviour. 
- self.patch("iris.fileformats.dot._DOT_EXECUTABLE_PATH", _DOT_EXECUTABLE_PATH) + mocker.patch("iris.fileformats.dot._DOT_EXECUTABLE_PATH", _DOT_EXECUTABLE_PATH) - def test_valid_absolute_path(self): + def test_valid_absolute_path(self, mocker): # Override the configuration value for System.dot_path real_path = os.path.abspath(__file__) assert os.path.exists(real_path) and os.path.isabs(real_path) - with mock.patch("iris.config.get_option", return_value=real_path): - result = _dot_path() - self.assertEqual(result, real_path) + mocker.patch("iris.config.get_option", return_value=real_path) + result = _dot_path() + assert result == real_path - def test_invalid_absolute_path(self): + def test_invalid_absolute_path(self, mocker): # Override the configuration value for System.dot_path dummy_path = "/not_a_real_path" * 10 assert not os.path.exists(dummy_path) - with mock.patch("iris.config.get_option", return_value=dummy_path): - result = _dot_path() - self.assertIsNone(result) + mocker.patch("iris.config.get_option", return_value=dummy_path) + result = _dot_path() + assert result is None - def test_valid_relative_path(self): + def test_valid_relative_path(self, mocker): # Override the configuration value for System.dot_path dummy_path = "not_a_real_path" * 10 assert not os.path.exists(dummy_path) - with mock.patch("iris.config.get_option", return_value=dummy_path): - # Pretend we have a valid installation of dot - with mock.patch("subprocess.check_output"): - result = _dot_path() - self.assertEqual(result, dummy_path) + mocker.patch("iris.config.get_option", return_value=dummy_path) + # Pretend we have a valid installation of dot + mocker.patch("subprocess.check_output") + result = _dot_path() + assert result == dummy_path - def test_valid_relative_path_broken_install(self): + def test_valid_relative_path_broken_install(self, mocker): # Override the configuration value for System.dot_path dummy_path = "not_a_real_path" * 10 assert not os.path.exists(dummy_path) - with mock.patch("iris.config.get_option", return_value=dummy_path): - # Pretend we have a broken installation of dot - error = subprocess.CalledProcessError(-5, "foo", "bar") - with mock.patch("subprocess.check_output", side_effect=error): - result = _dot_path() - self.assertIsNone(result) - - -if __name__ == "__main__": - tests.main() + mocker.patch("iris.config.get_option", return_value=dummy_path) + # Pretend we have a broken installation of dot + error = subprocess.CalledProcessError(-5, "foo", "bar") + mocker.patch("subprocess.check_output", side_effect=error) + result = _dot_path() + assert result is None diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py index ff80acf95b..b6cf87f5a6 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py @@ -4,41 +4,37 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock +import pytest import iris.coords from iris.fileformats.name_loaders import _build_cell_methods from iris.warnings import IrisLoadWarning -class Tests(tests.IrisTest): - def test_nameII_average(self): +class Tests: + def test_name_ii_average(self): av_or_int = ["something average ob bla"] coord_name = "foo" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("mean", "foo")]) + assert res == [iris.coords.CellMethod("mean", "foo")] - def test_nameIII_averaged(self): + def test_name_iii_averaged(self): av_or_int = ["something averaged ob bla"] coord_name = "bar" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("mean", "bar")]) + assert res == [iris.coords.CellMethod("mean", "bar")] - def test_nameII_integral(self): + def test_name_ii_integral(self): av_or_int = ["something integral ob bla"] coord_name = "ensemble" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("sum", "ensemble")]) + assert res == [iris.coords.CellMethod("sum", "ensemble")] - def test_nameIII_integrated(self): + def test_name_iii_integrated(self): av_or_int = ["something integrated ob bla"] coord_name = "time" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("sum", "time")]) + assert res == [iris.coords.CellMethod("sum", "time")] def test_no_averaging(self): av_or_int = [ @@ -51,9 +47,9 @@ def test_no_averaging(self): ] coord_name = "time" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [None] * len(av_or_int)) + assert res == [None] * len(av_or_int) - def test_nameII_mixed(self): + def test_name_ii_mixed(self): av_or_int = [ "something integral ob bla", "no averaging", @@ -61,16 +57,13 @@ def test_nameII_mixed(self): ] coord_name = "ensemble" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual( - res, - [ - iris.coords.CellMethod("sum", "ensemble"), - None, - iris.coords.CellMethod("mean", "ensemble"), - ], - ) + assert res == [ + iris.coords.CellMethod("sum", "ensemble"), + None, + iris.coords.CellMethod("mean", "ensemble"), + ] - def test_nameIII_mixed(self): + def test_name_iii_mixed(self): av_or_int = [ "something integrated ob bla", "no averaging", @@ -78,14 +71,11 @@ def test_nameIII_mixed(self): ] coord_name = "ensemble" res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual( - res, - [ - iris.coords.CellMethod("sum", "ensemble"), - None, - iris.coords.CellMethod("mean", "ensemble"), - ], - ) + assert res == [ + iris.coords.CellMethod("sum", "ensemble"), + None, + iris.coords.CellMethod("mean", "ensemble"), + ] def test_unrecognised(self): unrecognised_heading = "bla else" @@ -95,14 +85,13 @@ def test_unrecognised(self): "something integral", ] coord_name = "foo" - with mock.patch("warnings.warn") as warn: - _ = _build_cell_methods(av_or_int, coord_name) expected_msg = ( "Unknown {} statistic: {!r}. 
Unable to create cell method.".format( coord_name, unrecognised_heading ) ) - warn.assert_called_with(expected_msg, category=IrisLoadWarning) + with pytest.warns(IrisLoadWarning, match=expected_msg): + _ = _build_cell_methods(av_or_int, coord_name) def test_unrecognised_similar_to_no_averaging(self): unrecognised_headings = [ @@ -121,15 +110,10 @@ def test_unrecognised_similar_to_no_averaging(self): "something integral", ] coord_name = "foo" - with mock.patch("warnings.warn") as warn: - _ = _build_cell_methods(av_or_int, coord_name) expected_msg = ( "Unknown {} statistic: {!r}. Unable to create cell method.".format( coord_name, unrecognised_heading ) ) - warn.assert_called_with(expected_msg, category=IrisLoadWarning) - - -if __name__ == "__main__": - tests.main() + with pytest.warns(IrisLoadWarning, match=expected_msg): + _ = _build_cell_methods(av_or_int, coord_name) diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py index 9cc7ec356a..39067fdf2f 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py @@ -4,28 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from iris.fileformats.name_loaders import NAMECoord, _build_lat_lon_for_NAME_timeseries +from iris.tests._shared_utils import assert_array_equal -class TestCellMethods(tests.IrisTest): +class TestCellMethods: def test_float(self): column_headings = { "X": ["X = -.100 Lat-Long", "X = -1.600 Lat-Long"], "Y": ["Y = 52.450 Lat-Long", "Y = 51. 
Lat-Long"], } lat, lon = _build_lat_lon_for_NAME_timeseries(column_headings) - self.assertIsInstance(lat, NAMECoord) - self.assertIsInstance(lon, NAMECoord) - self.assertEqual(lat.name, "latitude") - self.assertEqual(lon.name, "longitude") - self.assertIsNone(lat.dimension) - self.assertIsNone(lon.dimension) - self.assertArrayEqual(lat.values, [52.45, 51.0]) - self.assertArrayEqual(lon.values, [-0.1, -1.6]) + assert isinstance(lat, NAMECoord) + assert isinstance(lon, NAMECoord) + assert lat.name == "latitude" + assert lon.name == "longitude" + assert lat.dimension is None + assert lon.dimension is None + assert_array_equal(lat.values, [52.45, 51.0]) + assert_array_equal(lon.values, [-0.1, -1.6]) def test_int(self): column_headings = { @@ -33,13 +30,13 @@ def test_int(self): "Y": ["Y = 52 Lat-Long", "Y = 51 Lat-Long"], } lat, lon = _build_lat_lon_for_NAME_timeseries(column_headings) - self.assertIsInstance(lat, NAMECoord) - self.assertIsInstance(lon, NAMECoord) - self.assertEqual(lat.name, "latitude") - self.assertEqual(lon.name, "longitude") - self.assertIsNone(lat.dimension) - self.assertIsNone(lon.dimension) - self.assertArrayEqual(lat.values, [52.0, 51.0]) - self.assertArrayEqual(lon.values, [-1.0, -2.0]) - self.assertIsInstance(lat.values[0], float) - self.assertIsInstance(lon.values[0], float) + assert isinstance(lat, NAMECoord) + assert isinstance(lon, NAMECoord) + assert lat.name == "latitude" + assert lon.name == "longitude" + assert lat.dimension is None + assert lon.dimension is None + assert_array_equal(lat.values, [52.0, 51.0]) + assert_array_equal(lon.values, [-1.0, -2.0]) + assert isinstance(lat.values[0], float) + assert isinstance(lon.values[0], float) diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py index 35ca2760b8..9b40aa47ff 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py @@ -4,58 +4,50 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import datetime from iris.fileformats.name_loaders import _calc_integration_period -class Test(tests.IrisTest): +class Test: def test_30_min_av(self): time_avgs = [" 30min average"] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (30 * 60))] - self.assertEqual(result, expected) + assert result == expected def test_30_min_av_rspace(self): time_avgs = [" 30min average "] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (30 * 60))] - self.assertEqual(result, expected) + assert result == expected def test_30_min_av_lstrip(self): time_avgs = [" 30min average".lstrip()] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (30 * 60))] - self.assertEqual(result, expected) + assert result == expected def test_3_hour_av(self): time_avgs = [" 3hr 0min average"] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (3 * 60 * 60))] - self.assertEqual(result, expected) + assert result == expected def test_3_hour_int(self): time_avgs = [" 3hr 0min integral"] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (3 * 60 * 60))] - self.assertEqual(result, expected) + assert result == expected def test_12_hour_av(self): time_avgs = [" 12hr 0min average"] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (12 * 60 * 60))] - self.assertEqual(result, expected) + assert result == expected def test_5_day_av(self): time_avgs = [" 5day 0hr 0min integral"] result = _calc_integration_period(time_avgs) expected = [datetime.timedelta(0, (5 * 24 * 60 * 60))] - self.assertEqual(result, expected) - - -if __name__ == "__main__": - tests.main() + assert result == expected diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py index 86729ef024..cd32f53843 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py @@ -7,31 +7,26 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np from iris.coords import AuxCoord from iris.fileformats.name_loaders import _cf_height_from_name -class TestAll(tests.IrisTest): - def _default_coord(self, data): - # This private method returns a coordinate with values expected - # when no interpretation is made of the field header string. - return AuxCoord( - units="no-unit", - points=data, - bounds=None, - standard_name=None, - long_name="z", - attributes={"positive": "up"}, - ) +def _default_coord(data): + # This private method returns a coordinate with values expected + # when no interpretation is made of the field header string. 
+ return AuxCoord( + units="no-unit", + points=data, + bounds=None, + standard_name=None, + long_name="z", + attributes={"positive": "up"}, + ) -class TestAll_NAMEII(TestAll): +class TestAll_NAMEII: # NAMEII formats are defined by bounds, not points def test_bounded_height_above_ground(self): data = "From 0 - 100m agl" @@ -44,7 +39,7 @@ def test_bounded_height_above_ground(self): long_name="height above ground level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_bounded_flight_level(self): data = "From FL0 - FL100" @@ -57,7 +52,7 @@ def test_bounded_flight_level(self): long_name="flight_level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_bounded_height_above_sea_level(self): data = "From 0 - 100m asl" @@ -70,31 +65,31 @@ def test_bounded_height_above_sea_level(self): long_name="altitude above sea level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_malformed_height_above_ground(self): # Parse height above ground level with additional stuff on the end of # the string (agl). data = "From 0 - 100m agl and stuff" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_malformed_height_above_sea_level(self): # Parse height above ground level with additional stuff on the end of # the string (agl). data = "From 0 - 100m asl and stuff" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_malformed_flight_level(self): # Parse height above ground level with additional stuff on the end of # the string (agl). data = "From FL0 - FL100 and stuff" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_float_bounded_height_above_ground(self): # Parse height above ground level when its a float. @@ -108,7 +103,7 @@ def test_float_bounded_height_above_ground(self): long_name="height above ground level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_float_bounded_height_flight_level(self): # Parse height above ground level, as a float (agl). @@ -122,7 +117,7 @@ def test_float_bounded_height_flight_level(self): long_name="flight_level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_float_bounded_height_above_sea_level(self): # Parse height above ground level as a float (agl). @@ -136,15 +131,15 @@ def test_float_bounded_height_above_sea_level(self): long_name="altitude above sea level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_no_match(self): # Parse height information when there is no match. # No interpretation, just returns default values. data = "Vertical integral" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_pressure(self): # Parse air_pressure string. @@ -158,10 +153,10 @@ def test_pressure(self): long_name=None, attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res -class TestAll_NAMEIII(TestAll): +class TestAll_NAMEIII: # NAMEIII formats are defined by points, not bounds. 
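# A small hypothetical sketch of the pattern just above: a helper that used
# to live on a shared unittest base class becomes a module-level function
# reused by plain pytest classes, with results checked by a bare assert.
def _default_pair(data):
    # Build the expected value without needing any base class.
    return (data, None)


class TestHelperSketch:
    def test_default_pair(self):
        assert _default_pair("Z = 50 m agl") == ("Z = 50 m agl", None)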
def test_height_above_ground(self): data = "Z = 50.00000 m agl" @@ -174,7 +169,7 @@ def test_height_above_ground(self): long_name="height above ground level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_height_flight_level(self): data = "Z = 50.00000 FL" @@ -187,7 +182,7 @@ def test_height_flight_level(self): long_name="flight_level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_height_above_sea_level(self): data = "Z = 50.00000 m asl" @@ -200,31 +195,31 @@ def test_height_above_sea_level(self): long_name="altitude above sea level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_malformed_height_above_ground(self): # Parse height above ground level, with additional stuff at the string # end (agl). data = "Z = 50.00000 m agl and stuff" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_malformed_height_above_sea_level(self): # Parse height above ground level, with additional stuff at string # end (agl). data = "Z = 50.00000 m asl and stuff" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_malformed_flight_level(self): # Parse height above ground level (agl), with additional stuff at # string end. data = "Z = 50.00000 FL and stuff" res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) + com = _default_coord(data) + assert com == res def test_integer_height_above_ground(self): # Parse height above ground level when its an integer. @@ -238,7 +233,7 @@ def test_integer_height_above_ground(self): long_name="height above ground level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_integer_height_flight_level(self): # Parse flight level when its an integer. @@ -252,7 +247,7 @@ def test_integer_height_flight_level(self): long_name="flight_level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_integer_height_above_sea_level(self): # Parse height above sea level (asl) when its an integer. @@ -266,7 +261,7 @@ def test_integer_height_above_sea_level(self): long_name="altitude above sea level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_enotation_height_above_ground(self): # Parse height above ground expressed in scientific notation @@ -280,7 +275,7 @@ def test_enotation_height_above_ground(self): long_name="height above ground level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_enotation_height_above_sea_level(self): # Parse height above sea level expressed in scientific notation @@ -294,7 +289,7 @@ def test_enotation_height_above_sea_level(self): long_name="altitude above sea level", attributes={"positive": "up"}, ) - self.assertEqual(com, res) + assert com == res def test_pressure(self): # Parse pressure. 
@@ -308,8 +303,4 @@ def test_pressure(self): long_name=None, attributes={"positive": "up"}, ) - self.assertEqual(com, res) - - -if __name__ == "__main__": - tests.main() + assert com == res diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py index fc00db9663..7238b68d31 100644 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py @@ -4,20 +4,17 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from datetime import datetime, timedelta from unittest import mock import numpy as np from iris.fileformats.name_loaders import NAMECoord, _generate_cubes +from iris.tests._shared_utils import assert_array_equal -class TestCellMethods(tests.IrisTest): - def test_cell_methods(self): +class TestCellMethods: + def test_cell_methods(self, mocker): header = mock.MagicMock() column_headings = { "Species": [1, 2, 3], @@ -29,8 +26,8 @@ def test_cell_methods(self): data_arrays = [mock.Mock(), mock.Mock()] cell_methods = ["cell_method_1", "cell_method_2"] - self.patch("iris.fileformats.name_loaders._cf_height_from_name") - self.patch("iris.cube.Cube") + mocker.patch("iris.fileformats.name_loaders._cf_height_from_name") + mocker.patch("iris.cube.Cube") cubes = list( _generate_cubes(header, column_headings, coords, data_arrays, cell_methods) ) @@ -39,8 +36,9 @@ def test_cell_methods(self): cubes[1].assert_has_calls([mock.call.add_cell_method("cell_method_2")]) -class TestCircularLongitudes(tests.IrisTest): - def _simulate_with_coords(self, names, values, dimensions): +class TestCircularLongitudes: + @staticmethod + def _simulate_with_coords(mocker, names, values, dimensions): header = mock.MagicMock() column_headings = { "Species": [1, 2, 3], @@ -54,50 +52,53 @@ def _simulate_with_coords(self, names, values, dimensions): ] data_arrays = [mock.Mock()] - self.patch("iris.fileformats.name_loaders._cf_height_from_name") - self.patch("iris.cube.Cube") + mocker.patch("iris.fileformats.name_loaders._cf_height_from_name") + mocker.patch("iris.cube.Cube") cubes = list(_generate_cubes(header, column_headings, coords, data_arrays)) return cubes - def test_non_circular(self): + def test_non_circular(self, mocker): results = self._simulate_with_coords( - names=["longitude"], values=[[1, 7, 23]], dimensions=[0] + mocker, names=["longitude"], values=[[1, 7, 23]], dimensions=[0] ) - self.assertEqual(len(results), 1) + assert len(results) == 1 add_coord_calls = results[0].add_dim_coord.call_args_list - self.assertEqual(len(add_coord_calls), 1) + assert len(add_coord_calls) == 1 coord = add_coord_calls[0][0][0] - self.assertEqual(coord.circular, False) + assert coord.circular is False - def test_circular(self): + def test_circular(self, mocker): results = self._simulate_with_coords( + mocker, names=["longitude"], values=[[5.0, 95.0, 185.0, 275.0]], dimensions=[0], ) - self.assertEqual(len(results), 1) + assert len(results) == 1 add_coord_calls = results[0].add_dim_coord.call_args_list - self.assertEqual(len(add_coord_calls), 1) + assert len(add_coord_calls) == 1 coord = add_coord_calls[0][0][0] - self.assertEqual(coord.circular, True) + assert coord.circular is True - def test_lat_lon_byname(self): 
+ def test_lat_lon_byname(self, mocker): results = self._simulate_with_coords( + mocker, names=["longitude", "latitude"], values=[[5.0, 95.0, 185.0, 275.0], [5.0, 95.0, 185.0, 275.0]], dimensions=[0, 1], ) - self.assertEqual(len(results), 1) + assert len(results) == 1 add_coord_calls = results[0].add_dim_coord.call_args_list - self.assertEqual(len(add_coord_calls), 2) + assert len(add_coord_calls) == 2 lon_coord = add_coord_calls[0][0][0] lat_coord = add_coord_calls[1][0][0] - self.assertEqual(lon_coord.circular, True) - self.assertEqual(lat_coord.circular, False) + assert lon_coord.circular is True + assert lat_coord.circular is False -class TestTimeCoord(tests.IrisTest): - def _simulate_with_coords(self, names, values, dimensions): +class TestTimeCoord: + @staticmethod + def _simulate_with_coords(mocker, names, values, dimensions): header = mock.MagicMock() column_headings = { "Species": [1, 2, 3], @@ -111,13 +112,14 @@ def _simulate_with_coords(self, names, values, dimensions): ] data_arrays = [mock.Mock()] - self.patch("iris.fileformats.name_loaders._cf_height_from_name") - self.patch("iris.cube.Cube") + mocker.patch("iris.fileformats.name_loaders._cf_height_from_name") + mocker.patch("iris.cube.Cube") cubes = list(_generate_cubes(header, column_headings, coords, data_arrays)) return cubes - def test_time_dim(self): + def test_time_dim(self, mocker): results = self._simulate_with_coords( + mocker, names=["longitude", "latitude", "time"], values=[ [10, 20], @@ -126,33 +128,30 @@ def test_time_dim(self): ], dimensions=[0, 1, 2], ) - self.assertEqual(len(results), 1) + assert len(results) == 1 result = results[0] dim_coord_calls = result.add_dim_coord.call_args_list - self.assertEqual(len(dim_coord_calls), 3) # lon, lat, time + assert len(dim_coord_calls) == 3 # lon, lat, time t_coord = dim_coord_calls[2][0][0] - self.assertEqual(t_coord.standard_name, "time") - self.assertArrayEqual(t_coord.points, [398232, 398256]) - self.assertArrayEqual(t_coord.bounds[0], [398208, 398232]) - self.assertArrayEqual(t_coord.bounds[-1], [398232, 398256]) + assert t_coord.standard_name == "time" + assert_array_equal(t_coord.points, [398232, 398256]) + assert_array_equal(t_coord.bounds[0], [398208, 398232]) + assert_array_equal(t_coord.bounds[-1], [398232, 398256]) - def test_time_scalar(self): + def test_time_scalar(self, mocker): results = self._simulate_with_coords( + mocker, names=["longitude", "latitude", "time"], values=[[10, 20], [30, 40], [datetime(2015, 6, 7)]], dimensions=[0, 1, None], ) - self.assertEqual(len(results), 1) + assert len(results) == 1 result = results[0] dim_coord_calls = result.add_dim_coord.call_args_list - self.assertEqual(len(dim_coord_calls), 2) + assert len(dim_coord_calls) == 2 aux_coord_calls = result.add_aux_coord.call_args_list - self.assertEqual(len(aux_coord_calls), 1) + assert len(aux_coord_calls) == 1 t_coord = aux_coord_calls[0][0][0] - self.assertEqual(t_coord.standard_name, "time") - self.assertArrayEqual(t_coord.points, [398232]) - self.assertArrayEqual(t_coord.bounds, [[398208, 398232]]) - - -if __name__ == "__main__": - tests.main() + assert t_coord.standard_name == "time" + assert_array_equal(t_coord.points, [398232]) + assert_array_equal(t_coord.bounds, [[398208, 398232]]) diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py index 2767807377..5c6c59c3a1 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ 
b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py @@ -4,24 +4,22 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.nimrod_load_rules.units` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.cube import Cube from iris.fileformats.nimrod import NimrodField from iris.fileformats.nimrod_load_rules import NIMROD_DEFAULT, units +from iris.tests._shared_utils import ( + assert_array_almost_equal, + assert_no_warnings_regexp, +) -class Test(tests.IrisTest): - NIMROD_LOCATION = "iris.fileformats.nimrod_load_rules" - - def setUp(self): - self.field = mock.Mock( +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.field = mocker.Mock( units="", int_mdi=-32767, float32_mdi=NIMROD_DEFAULT, @@ -36,90 +34,78 @@ def _call_units(self, data=None, units_str=None): self.field.units = units_str units(self.cube, self.field) - def test_null(self): - with mock.patch("warnings.warn") as warn: + def test_null(self, mocker): + with assert_no_warnings_regexp(): self._call_units(units_str="m") - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "m") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.units == "m" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) - def test_times32(self): - with mock.patch("warnings.warn") as warn: + def test_times32(self, mocker): + with assert_no_warnings_regexp(): self._call_units( data=np.ones_like(self.cube.data) * 32, units_str="mm/hr*32" ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "mm/hr") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "mm/hr" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_visibility_units(self): - with mock.patch("warnings.warn") as warn: + def test_visibility_units(self, mocker): + with assert_no_warnings_regexp(): self._call_units( data=((np.ones_like(self.cube.data) / 2) - 25000), units_str="m/2-25k", ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "m") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "m" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_power_in_units(self): - with mock.patch("warnings.warn") as warn: + def test_power_in_units(self, mocker): + with assert_no_warnings_regexp(): self._call_units( data=np.ones_like(self.cube.data) * 1000, units_str="mm*10^3" ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "mm") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "mm" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_ug_per_m3_units(self): - with mock.patch("warnings.warn") as warn: + def test_ug_per_m3_units(self, mocker): + with assert_no_warnings_regexp(): self._call_units( data=(np.ones_like(self.cube.data) * 10), 
units_str="ug/m3E1", ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "ug/m3") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "ug/m3" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_g_per_kg(self): - with mock.patch("warnings.warn") as warn: + def test_g_per_kg(self, mocker): + with assert_no_warnings_regexp(): self._call_units( data=(np.ones_like(self.cube.data) * 1000), units_str="g/Kg" ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "kg/kg") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "kg/kg" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_unit_expection_dictionary(self): - with mock.patch("warnings.warn") as warn: + def test_unit_expection_dictionary(self, mocker): + with assert_no_warnings_regexp(): self._call_units(units_str="mb") - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "hPa") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "hPa" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_per_second(self): - with mock.patch("warnings.warn") as warn: + def test_per_second(self, mocker): + with assert_no_warnings_regexp(): self._call_units(units_str="/s") - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "s^-1") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) + assert self.cube.units == "s^-1" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 - def test_unhandled_unit(self): - with mock.patch("warnings.warn") as warn: + def test_unhandled_unit(self, mocker): + warning_message = "Unhandled units 'kittens' recorded in cube attributes" + with pytest.warns(match=warning_message): self._call_units(units_str="kittens") - self.assertEqual(warn.call_count, 1) - self.assertEqual(self.cube.units, "") - self.assertArrayAlmostEqual(self.cube.data, np.ones_like(self.cube.data)) - self.assertEqual(self.cube.data.dtype, np.float32) - self.assertEqual(self.cube.attributes["invalid_units"], "kittens") - - -if __name__ == "__main__": - tests.main() + assert self.cube.units == "" + assert_array_almost_equal(self.cube.data, np.ones_like(self.cube.data)) + assert self.cube.data.dtype == np.float32 + assert self.cube.attributes["invalid_units"] == "kittens" diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py index 809c54726c..2a3d06e56a 100644 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py @@ -7,11 +7,7 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock +import pytest from iris.fileformats.nimrod import NimrodField from iris.fileformats.nimrod_load_rules import ( @@ -19,13 +15,13 @@ TranslationWarning, vertical_coord, ) +from iris.tests._shared_utils import assert_no_warnings_regexp -class Test(tests.IrisTest): - NIMROD_LOCATION = "iris.fileformats.nimrod_load_rules" - - def setUp(self): - self.field = mock.Mock( +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.field = mocker.Mock( vertical_coord=NIMROD_DEFAULT, vertical_coord_type=NIMROD_DEFAULT, reference_vertical_coord=NIMROD_DEFAULT, @@ -34,7 +30,7 @@ def setUp(self): float32_mdi=NIMROD_DEFAULT, spec=NimrodField, ) - self.cube = mock.Mock() + self.cube = mocker.Mock() def _call_vertical_coord( self, @@ -54,23 +50,15 @@ def _call_vertical_coord( vertical_coord(self.cube, self.field) def test_unhandled(self): - with mock.patch("warnings.warn") as warn: + message_regexp = "Vertical coord -1 not yet handled" + with pytest.warns(TranslationWarning, match=message_regexp): self._call_vertical_coord(vertical_coord_val=1.0, vertical_coord_type=-1) - warn.assert_called_once_with( - "Vertical coord -1 not yet handled", category=TranslationWarning - ) def test_null(self): - with mock.patch("warnings.warn") as warn: + with assert_no_warnings_regexp(): self._call_vertical_coord(vertical_coord_type=NIMROD_DEFAULT) self._call_vertical_coord(vertical_coord_type=self.field.int_mdi) - self.assertEqual(warn.call_count, 0) def test_ground_level(self): - with mock.patch("warnings.warn") as warn: + with assert_no_warnings_regexp(): self._call_vertical_coord(vertical_coord_val=9999.0, vertical_coord_type=0) - self.assertEqual(warn.call_count, 0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py index c8361feae4..dd6485b015 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py @@ -3,3 +3,26 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats.pp_load_rules` module.""" + + +# a general utility function for PP field tests +def assert_coords_and_dims_lists_match(coords_and_dims_got, coords_and_dims_expected): + """Check that coords_and_dims lists are equivalent. + + The arguments are lists of pairs of (coordinate, dimensions). + The elements are compared one-to-one, by coordinate name (so the order + of the lists is _not_ significant). + It also checks that the coordinate types (DimCoord/AuxCoord) match. + + """ + + def sorted_by_coordname(list): + return sorted(list, key=lambda item: item[0].name()) + + coords_and_dims_got = sorted_by_coordname(coords_and_dims_got) + coords_and_dims_expected = sorted_by_coordname(coords_and_dims_expected) + assert coords_and_dims_got == coords_and_dims_expected + # Also check coordinate type equivalences (as Coord.__eq__ does not). 
+ assert [type(coord) for coord, dims in coords_and_dims_got] == [ + type(coord) for coord, dims in coords_and_dims_expected + ] diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py index e2c71790b4..05f9c6f82b 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py @@ -4,10 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from unittest import mock from cf_units import CALENDAR_360_DAY, Unit @@ -17,7 +13,7 @@ from iris.coords import AuxCoord, CellMethod, DimCoord from iris.fileformats.pp import SplittableInt from iris.fileformats.pp_load_rules import _all_other_rules -from iris.tests.unit.fileformats import TestField +from iris.tests.unit.fileformats.pp_load_rules import assert_coords_and_dims_lists_match # iris.fileformats.pp_load_rules._all_other_rules() returns a tuple of # of various metadata. This constant is the index into this @@ -27,88 +23,88 @@ AUX_COORDS_INDEX = 7 -class TestCellMethods(tests.IrisTest): - def test_time_mean(self): +class TestCellMethods: + def test_time_mean(self, mocker): # lbproc = 128 -> mean # lbtim.ib = 2 -> simple t1 to t2 interval. - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=0, ib=2, ic=3)) + field = mocker.MagicMock(lbproc=128, lbtim=mocker.Mock(ia=0, ib=2, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("mean", "time")] - self.assertEqual(res, expected) + assert res == expected - def test_hourly_mean(self): + def test_hourly_mean(self, mocker): # lbtim.ia = 1 -> hourly - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=1, ib=2, ic=3)) + field = mocker.MagicMock(lbproc=128, lbtim=mocker.Mock(ia=1, ib=2, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("mean", "time", "1 hour")] - self.assertEqual(res, expected) + assert res == expected - def test_daily_mean(self): + def test_daily_mean(self, mocker): # lbtim.ia = 24 -> daily - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=24, ib=2, ic=3)) + field = mocker.MagicMock(lbproc=128, lbtim=mocker.Mock(ia=24, ib=2, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("mean", "time", "24 hour")] - self.assertEqual(res, expected) + assert res == expected - def test_custom_max(self): - field = mock.MagicMock(lbproc=8192, lbtim=mock.Mock(ia=47, ib=2, ic=3)) + def test_custom_max(self, mocker): + field = mocker.MagicMock(lbproc=8192, lbtim=mocker.Mock(ia=47, ib=2, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("maximum", "time", "47 hour")] - self.assertEqual(res, expected) + assert res == expected - def test_daily_min(self): + def test_daily_min(self, mocker): # lbproc = 4096 -> min - field = mock.MagicMock(lbproc=4096, lbtim=mock.Mock(ia=24, ib=2, ic=3)) + field = mocker.MagicMock(lbproc=4096, lbtim=mocker.Mock(ia=24, ib=2, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("minimum", "time", "24 hour")] - self.assertEqual(res, expected) + assert res == expected - def test_time_mean_over_multiple_years(self): + def test_time_mean_over_multiple_years(self, mocker): # lbtim.ib = 3 -> 
interval within a year, over multiple years. - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=0, ib=3, ic=3)) + field = mocker.MagicMock(lbproc=128, lbtim=mocker.Mock(ia=0, ib=3, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [ CellMethod("mean within years", "time"), CellMethod("mean over years", "time"), ] - self.assertEqual(res, expected) + assert res == expected - def test_hourly_mean_over_multiple_years(self): - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=1, ib=3, ic=3)) + def test_hourly_mean_over_multiple_years(self, mocker): + field = mocker.MagicMock(lbproc=128, lbtim=mocker.Mock(ia=1, ib=3, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [ CellMethod("mean within years", "time", "1 hour"), CellMethod("mean over years", "time"), ] - self.assertEqual(res, expected) + assert res == expected - def test_climatology_max(self): - field = mock.MagicMock(lbproc=8192, lbtim=mock.Mock(ia=24, ib=3, ic=3)) + def test_climatology_max(self, mocker): + field = mocker.MagicMock(lbproc=8192, lbtim=mocker.Mock(ia=24, ib=3, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("maximum", "time")] - self.assertEqual(res, expected) + assert res == expected - def test_climatology_min(self): - field = mock.MagicMock(lbproc=4096, lbtim=mock.Mock(ia=24, ib=3, ic=3)) + def test_climatology_min(self, mocker): + field = mocker.MagicMock(lbproc=4096, lbtim=mocker.Mock(ia=24, ib=3, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("minimum", "time")] - self.assertEqual(res, expected) + assert res == expected - def test_other_lbtim_ib(self): + def test_other_lbtim_ib(self, mocker): # lbtim.ib = 5 -> non-specific aggregation - field = mock.MagicMock(lbproc=4096, lbtim=mock.Mock(ia=24, ib=5, ic=3)) + field = mocker.MagicMock(lbproc=4096, lbtim=mocker.Mock(ia=24, ib=5, ic=3)) res = _all_other_rules(field)[CELL_METHODS_INDEX] expected = [CellMethod("minimum", "time")] - self.assertEqual(res, expected) + assert res == expected - def test_multiple_unordered_lbprocs(self): - field = mock.MagicMock( + def test_multiple_unordered_lbprocs(self, mocker): + field = mocker.MagicMock( lbproc=192, bzx=0, bdx=1, lbnpt=3, lbrow=3, - lbtim=mock.Mock(ia=24, ib=5, ic=3), + lbtim=mocker.Mock(ia=24, ib=5, ic=3), lbcode=SplittableInt(1), x_bounds=None, _x_coord_name=lambda: "longitude", @@ -122,16 +118,16 @@ def test_multiple_unordered_lbprocs(self): CellMethod("mean", "time"), CellMethod("mean", "longitude"), ] - self.assertEqual(res, expected) + assert res == expected - def test_multiple_unordered_rotated_lbprocs(self): - field = mock.MagicMock( + def test_multiple_unordered_rotated_lbprocs(self, mocker): + field = mocker.MagicMock( lbproc=192, bzx=0, bdx=1, lbnpt=3, lbrow=3, - lbtim=mock.Mock(ia=24, ib=5, ic=3), + lbtim=mocker.Mock(ia=24, ib=5, ic=3), lbcode=SplittableInt(101), x_bounds=None, _x_coord_name=lambda: "grid_longitude", @@ -145,15 +141,15 @@ def test_multiple_unordered_rotated_lbprocs(self): CellMethod("mean", "time"), CellMethod("mean", "grid_longitude"), ] - self.assertEqual(res, expected) + assert res == expected -class TestCrossSectionalTime(TestField): - def test_lbcode3x23(self): +class TestCrossSectionalTime: + def test_lbcode3x23(self, mocker): time_bounds = np.array( [[0.875, 1.125], [1.125, 1.375], [1.375, 1.625], [1.625, 1.875]] ) - field = mock.MagicMock( + field = mocker.MagicMock( lbproc=0, bzx=0, bdx=0, @@ -161,7 +157,7 @@ def test_lbcode3x23(self): lbrow=4, t1=nc_datetime(2000, 1, 2, hour=0, 
minute=0, second=0), t2=nc_datetime(2000, 1, 3, hour=0, minute=0, second=0), - lbtim=mock.Mock(ia=1, ib=2, ic=2), + lbtim=mocker.Mock(ia=1, ib=2, ic=2), lbcode=SplittableInt(31323, {"iy": slice(0, 2), "ix": slice(2, 4)}), x_bounds=None, y_bounds=time_bounds, @@ -207,10 +203,10 @@ def test_lbcode3x23(self): 0, ) ] - self.assertCoordsAndDimsListsMatch(res, expected) + assert_coords_and_dims_lists_match(res, expected) -class TestLBTIMx2x_ZeroYears(TestField): +class TestLBTIMx2x_ZeroYears: _spec = [ "lbtim", "lbcode", @@ -278,29 +274,25 @@ def test_month_coord(self): None, ), ] - self.assertCoordsAndDimsListsMatch(res, expected) + assert_coords_and_dims_lists_match(res, expected) def test_diff_month(self): field = self._make_field(lbmon=3, lbmond=4) field.mock_add_spec(self._spec) res = _all_other_rules(field)[AUX_COORDS_INDEX] - self.assertCoordsAndDimsListsMatch(res, []) + assert_coords_and_dims_lists_match(res, []) def test_nonzero_year(self): field = self._make_field(lbyr=1) field.mock_add_spec(self._spec) res = _all_other_rules(field)[AUX_COORDS_INDEX] - self.assertCoordsAndDimsListsMatch(res, []) + assert_coords_and_dims_lists_match(res, []) def test_nonzero_yeard(self): field = self._make_field(lbyrd=1) field.mock_add_spec(self._spec) res = _all_other_rules(field)[AUX_COORDS_INDEX] - self.assertCoordsAndDimsListsMatch(res, []) - - -if __name__ == "__main__": - tests.main() + assert_coords_and_dims_lists_match(res, []) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py index 7d502bc2d6..523aef0e64 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py @@ -7,56 +7,53 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats.pp_load_rules import _collapse_degenerate_points_and_bounds +from iris.tests._shared_utils import assert_array_equal -class Test(tests.IrisTest): +class Test: def test_scalar(self): array = np.array(1) points, bounds = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(points, array) - self.assertIsNone(bounds) + assert_array_equal(points, array) + assert bounds is None def test_1d_nochange(self): array = np.array([1, 1, 3]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, array) + assert_array_equal(result, array) def test_1d_collapse(self): array = np.array([1, 1, 1]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([1])) + assert_array_equal(result, np.array([1])) def test_2d_nochange(self): array = np.array([[1, 2, 3], [4, 5, 6]]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, array) + assert_array_equal(result, array) def test_2d_collapse_dim0(self): array = np.array([[1, 2, 3], [1, 2, 3]]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[1, 2, 3]])) + assert_array_equal(result, np.array([[1, 2, 3]])) def test_2d_collapse_dim1(self): array = np.array([[1, 1, 1], [2, 2, 2]]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[1], [2]])) + assert_array_equal(result, np.array([[1], [2]])) def test_2d_collapse_both(self): array = np.array([[3, 3, 3], [3, 3, 3]]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[3]])) + assert_array_equal(result, np.array([[3]])) def test_3d(self): array = np.array([[[3, 3, 3], [4, 4, 4]], [[3, 3, 3], [4, 4, 4]]]) result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[[3], [4]]])) + assert_array_equal(result, np.array([[[3], [4]]])) def test_multiple_odd_dims(self): # Test to ensure multiple collapsed dimensions don't interfere. 
@@ -66,24 +63,20 @@ def test_multiple_odd_dims(self): array[:, :, 1:] = array[:, :, 0:1] array[:, :, :, 1:] = array[:, :, :, 0:1] result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertEqual(array.shape, (3, 3, 3, 3, 3)) - self.assertEqual(result.shape, (1, 3, 1, 1, 3)) - self.assertTrue(np.all(result == array[0:1, :, 0:1, 0:1, :])) + assert array.shape == (3, 3, 3, 3, 3) + assert result.shape == (1, 3, 1, 1, 3) + assert np.all(result == array[0:1, :, 0:1, 0:1, :]) def test_bounds_collapse(self): points = np.array([1, 1, 1]) bounds = np.array([[0, 1], [0, 1], [0, 1]]) result_pts, result_bds = _collapse_degenerate_points_and_bounds(points, bounds) - self.assertArrayEqual(result_pts, np.array([1])) - self.assertArrayEqual(result_bds, np.array([[0, 1]])) + assert_array_equal(result_pts, np.array([1])) + assert_array_equal(result_bds, np.array([[0, 1]])) def test_bounds_no_collapse(self): points = np.array([1, 1, 1]) bounds = np.array([[0, 1], [0, 1], [0, 2]]) result_pts, result_bds = _collapse_degenerate_points_and_bounds(points, bounds) - self.assertArrayEqual(result_pts, points) - self.assertArrayEqual(result_bds, bounds) - - -if __name__ == "__main__": - tests.main() + assert_array_equal(result_pts, points) + assert_array_equal(result_bds, bounds) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py index bc3cf8ed86..ad15d7a395 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py @@ -7,27 +7,17 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from iris.coords import DimCoord from iris.fileformats.pp_load_rules import _convert_scalar_pseudo_level_coords -from iris.tests.unit.fileformats import TestField -class Test(TestField): +class Test: def test_valid(self): coords_and_dims = _convert_scalar_pseudo_level_coords(lbuser5=21) - self.assertEqual( - coords_and_dims, - [(DimCoord([21], long_name="pseudo_level", units="1"), None)], - ) + assert coords_and_dims == [ + (DimCoord([21], long_name="pseudo_level", units="1"), None) + ] def test_missing_indicator(self): coords_and_dims = _convert_scalar_pseudo_level_coords(lbuser5=0) - self.assertEqual(coords_and_dims, []) - - -if __name__ == "__main__": - tests.main() + assert coords_and_dims == [] diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py index ac28fe0a1c..5d01eb5976 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py @@ -7,27 +7,17 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from iris.coords import DimCoord from iris.fileformats.pp_load_rules import _convert_scalar_realization_coords -from iris.tests.unit.fileformats import TestField -class Test(TestField): +class Test: def test_valid(self): coords_and_dims = _convert_scalar_realization_coords(lbrsvd4=21) - self.assertEqual( - coords_and_dims, - [(DimCoord([21], standard_name="realization", units="1"), None)], - ) + assert coords_and_dims == [ + (DimCoord([21], standard_name="realization", units="1"), None) + ] def test_missing_indicator(self): coords_and_dims = _convert_scalar_realization_coords(lbrsvd4=0) - self.assertEqual(coords_and_dims, []) - - -if __name__ == "__main__": - tests.main() + assert coords_and_dims == [] diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py index 5cebc009b9..690f869a34 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py @@ -7,10 +7,6 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from cf_units import CALENDAR_360_DAY, CALENDAR_STANDARD, Unit from cftime import datetime as nc_datetime import numpy as np @@ -18,7 +14,8 @@ from iris.coords import AuxCoord, DimCoord from iris.fileformats.pp import SplittableInt from iris.fileformats.pp_load_rules import _convert_time_coords -from iris.tests.unit.fileformats import TestField +from iris.tests._shared_utils import assert_array_all_close +from iris.tests.unit.fileformats.pp_load_rules import assert_coords_and_dims_lists_match def _lbtim(ia=0, ib=0, ic=0): @@ -40,7 +37,7 @@ def _lbcode(value=None, ix=None, iy=None): _HOURS_UNIT = Unit("hours") -class TestLBTIMx0x_SingleTimepoint(TestField): +class TestLBTIMx0x_SingleTimepoint: def _check_timepoint(self, lbcode, expect_match=True): lbtim = _lbtim(ib=0, ic=1) t1 = nc_datetime(1970, 1, 1, hour=6, minute=0, second=0) @@ -69,7 +66,7 @@ def _check_timepoint(self, lbcode, expect_match=True): ] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) def test_normal_xy_dims(self): self._check_timepoint(_lbcode(1)) @@ -81,7 +78,7 @@ def test_time_cross_section(self): self._check_timepoint(_lbcode(ix=1, iy=20), expect_match=False) -class TestLBTIMx1x_Forecast(TestField): +class TestLBTIMx1x_Forecast: def _check_forecast(self, lbcode, expect_match=True): lbtim = _lbtim(ib=1, ic=1) # Validity time @@ -126,7 +123,7 @@ def _check_forecast(self, lbcode, expect_match=True): ] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) def test_normal_xy(self): self._check_forecast(_lbcode(1)) @@ -151,8 +148,8 @@ def test_exact_hours(self): ) (fp, _), (t, _), (frt, _) = coords_and_dims # These should both be exact whole numbers. 
- self.assertEqual(fp.points[0], 7) - self.assertEqual(t.points[0], 394927) + assert fp.points[0] == 7 + assert t.points[0] == 394927 def test_not_exact_hours(self): lbtim = _lbtim(ib=1, ic=1) @@ -167,11 +164,11 @@ def test_not_exact_hours(self): lbft=None, ) (fp, _), (t, _), (frt, _) = coords_and_dims - self.assertArrayAllClose(fp.points[0], 7.1666666, atol=0.0001, rtol=0) - self.assertArrayAllClose(t.points[0], 394927.166666, atol=0.01, rtol=0) + assert_array_all_close(fp.points[0], 7.1666666, atol=0.0001, rtol=0) + assert_array_all_close(t.points[0], 394927.166666, atol=0.01, rtol=0) -class TestLBTIMx2x_TimePeriod(TestField): +class TestLBTIMx2x_TimePeriod: def _check_period(self, lbcode, expect_match=True): lbtim = _lbtim(ib=2, ic=1) # Start time @@ -218,7 +215,7 @@ def _check_period(self, lbcode, expect_match=True): ] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) def test_normal_xy(self): self._check_period(_lbcode(1)) @@ -230,7 +227,7 @@ def test_time_cross_section(self): self._check_period(_lbcode(ix=1, iy=20), expect_match=False) -class TestLBTIMx3x_YearlyAggregation(TestField): +class TestLBTIMx3x_YearlyAggregation: def _check_yearly(self, lbcode, expect_match=True): lbtim = _lbtim(ib=3, ic=1) # Start time @@ -280,7 +277,7 @@ def _check_yearly(self, lbcode, expect_match=True): ] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) def test_normal_xy(self): self._check_yearly(_lbcode(1)) @@ -292,7 +289,7 @@ def test_time_cross_section(self): self._check_yearly(_lbcode(ix=1, iy=20), expect_match=False) -class TestLBTIMx2x_ZeroYear(TestField): +class TestLBTIMx2x_ZeroYear: def test_(self): lbtim = _lbtim(ib=2, ic=1) t1 = nc_datetime(0, 1, 1, has_year_zero=True) @@ -307,10 +304,10 @@ def test_(self): t2=t2, lbft=lbft, ) - self.assertEqual(coords_and_dims, []) + assert coords_and_dims == [] -class TestLBTIMxxx_Unhandled(TestField): +class TestLBTIMxxx_Unhandled: def test_unrecognised(self): lbtim = _lbtim(ib=4, ic=1) t1 = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) @@ -325,10 +322,10 @@ def test_unrecognised(self): t2=t2, lbft=lbft, ) - self.assertEqual(coords_and_dims, []) + assert coords_and_dims == [] -class TestLBCODE3xx(TestField): +class TestLBCODE3xx: def test(self): lbcode = _lbcode(value=31323) lbtim = _lbtim(ib=2, ic=2) @@ -355,10 +352,10 @@ def test(self): None, ) ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected_result) + assert_coords_and_dims_lists_match(coords_and_dims, expected_result) -class TestArrayInputWithLBTIM_0_0_1(TestField): +class TestArrayInputWithLBTIM_0_0_1: def test_t1_list(self): # lbtim ia = 0, ib = 0, ic = 1 # with a series of times (t1). 
@@ -387,10 +384,10 @@ def test_t1_list(self): (24 * 8) + 3 + hours, standard_name="time", units=_EPOCH_HOURS_UNIT ) expected = [(time_coord, (0,))] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) -class TestArrayInputWithLBTIM_0_1_1(TestField): +class TestArrayInputWithLBTIM_0_1_1: def test_t1_list_t2_scalar(self): # lbtim ia = 0, ib = 1, ic = 1 # with a single forecast reference time (t2) and a series @@ -436,7 +433,7 @@ def test_t1_list_t2_scalar(self): (time_coord, (0,)), (fref_time_coord, None), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) def test_t1_and_t2_list(self): # lbtim ia = 0, ib = 1, ic = 1 @@ -485,7 +482,7 @@ def test_t1_and_t2_list(self): (time_coord, (0,)), (fref_time_coord, None), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) def test_t1_and_t2_orthogonal_lists(self): # lbtim ia = 0, ib = 1, ic = 1 @@ -532,7 +529,7 @@ def test_t1_and_t2_orthogonal_lists(self): (time_coord, (0,)), (fref_time_coord, (1,)), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) def test_t1_multi_dim_list_t2_scalar(self): # Another case of lbtim ia = 0, ib = 1, ic = 1 but @@ -586,7 +583,7 @@ def test_t1_multi_dim_list_t2_scalar(self): (time_coord, (0, 1)), (fref_time_coord, None), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) def test_t1_and_t2_nparrays(self): # lbtim ia = 0, ib = 1, ic = 1 @@ -639,10 +636,10 @@ def test_t1_and_t2_nparrays(self): (time_coord, (0,)), (fref_time_coord, None), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) -class TestArrayInputWithLBTIM_0_2_1(TestField): +class TestArrayInputWithLBTIM_0_2_1: def test_t1_list_t2_scalar(self): lbtim = _lbtim(ib=2, ic=1) lbcode = _lbcode(1) @@ -694,10 +691,10 @@ def test_t1_list_t2_scalar(self): (time_coord, (0,)), (fref_time_coord, None), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) + assert_coords_and_dims_lists_match(coords_and_dims, expected) -class TestArrayInputWithLBTIM_0_3_1(TestField): +class TestArrayInputWithLBTIM_0_3_1: def test_t1_scalar_t2_list(self): lbtim = _lbtim(ib=3, ic=1) lbcode = _lbcode(1) @@ -754,8 +751,4 @@ def test_t1_scalar_t2_list(self): (time_coord, (0,)), (fref_time_coord, (0,)), ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - -if __name__ == "__main__": - tests.main() + assert_coords_and_dims_lists_match(coords_and_dims, expected) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py index 0e159b254e..7f6270e9f3 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py @@ -7,17 +7,13 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris.aux_factory import HybridHeightFactory, HybridPressureFactory from iris.coords import AuxCoord, DimCoord from iris.fileformats.pp import STASH, SplittableInt from iris.fileformats.pp_load_rules import Reference, _convert_vertical_coords -from iris.tests.unit.fileformats import TestField +from iris.tests.unit.fileformats.pp_load_rules import assert_coords_and_dims_lists_match def _lbcode(value=None, ix=None, iy=None): @@ -31,7 +27,7 @@ def _lbcode(value=None, ix=None, iy=None): return result -class TestLBVC001_Height(TestField): +class TestLBVC001_Height: def _check_height( self, blev, @@ -89,8 +85,8 @@ def _check_height( ] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) + assert factories == [] def test_normal_height__present(self): self._check_height(blev=12.3, stash=STASH(1, 1, 1)) @@ -172,7 +168,7 @@ def test_implied_height_10m__vector(self): ) -class TestLBVC002_Depth(TestField): +class TestLBVC002_Depth: def _check_depth( self, lbcode, @@ -248,8 +244,8 @@ def _check_depth( ) else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) + assert factories == [] def test_unbounded(self): self._check_depth(_lbcode(1), lblev=23.0, expect_bounds=False) @@ -323,7 +319,7 @@ def test_cross_section__vector(self): ) -class TestLBVC006_SoilLevel(TestField): +class TestLBVC006_SoilLevel: def _check_soil_level(self, lbcode, lblev=12.3, expect_match=True, dim=None): lbvc = 6 stash = STASH(1, 1, 1) @@ -354,8 +350,8 @@ def _check_soil_level(self, lbcode, lblev=12.3, expect_match=True, dim=None): units="1", ) expect_result = [(coord, dim)] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) + assert factories == [] def test_normal(self): self._check_soil_level(_lbcode(0)) @@ -374,7 +370,7 @@ def test_cross_section__vector(self): ) -class TestLBVC006_SoilDepth(TestField): +class TestLBVC006_SoilDepth: def _check_soil_depth( self, lbcode, @@ -410,8 +406,8 @@ def _check_soil_depth( attributes={"positive": "down"}, ) expect_result = [(coord, dim)] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) + assert factories == [] def test_normal(self): self._check_soil_depth(_lbcode(0)) @@ -450,7 +446,7 @@ def test_cross_section__vector(self): ) -class TestLBVC008_Pressure(TestField): +class TestLBVC008_Pressure: def _check_pressure(self, lbcode, blev=250.3, expect_match=True, dim=None): lbvc = 8 stash = STASH(1, 1, 1) @@ -479,8 +475,8 @@ def _check_pressure(self, lbcode, blev=250.3, expect_match=True, dim=None): expect_result = [(DimCoord(blev, long_name="pressure", units="hPa"), dim)] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) + assert factories == [] def test_normal(self): self._check_pressure(_lbcode(0)) @@ -504,7 +500,7 @@ def test_pressure_cross_section__vector(self): self._check_pressure(_lbcode(ix=10, iy=1), blev=blev, dim=1, expect_match=False) -class 
TestLBVC019_PotentialTemperature(TestField): +class TestLBVC019_PotentialTemperature: def _check_potm(self, lbcode, blev=130.6, expect_match=True, dim=None): lbvc = 19 stash = STASH(1, 1, 1) @@ -543,8 +539,8 @@ def _check_potm(self, lbcode, blev=130.6, expect_match=True, dim=None): ] else: expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) + assert_coords_and_dims_lists_match(coords_and_dims, expect_result) + assert factories == [] def test_normal(self): self._check_potm(_lbcode(0)) @@ -561,7 +557,7 @@ def test_cross_section__vector(self): self._check_potm(_lbcode(ix=10, iy=11), blev=blev, dim=1, expect_match=False) -class TestLBVC009_HybridPressure(TestField): +class TestLBVC009_HybridPressure: def _check( self, lblev=37.0, @@ -638,8 +634,8 @@ def _check( ], ) ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_coords_and_dims) - self.assertEqual(factories, expect_factories) + assert_coords_and_dims_lists_match(coords_and_dims, expect_coords_and_dims) + assert factories == expect_factories def test_normal(self): self._check() @@ -664,7 +660,7 @@ def test_normal__vector(self): ) -class TestLBVC065_HybridHeight(TestField): +class TestLBVC065_HybridHeight: def _check( self, lblev=37.0, @@ -740,8 +736,8 @@ def _check( ], ) ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_coords_and_dims) - self.assertEqual(factories, expect_factories) + assert_coords_and_dims_lists_match(coords_and_dims, expect_coords_and_dims) + assert factories == expect_factories def test_normal(self): self._check() @@ -767,7 +763,7 @@ def test_normal__vector(self): ) -class TestLBVCxxx_Unhandled(TestField): +class TestLBVCxxx_Unhandled: def test_unknown_lbvc(self): lbvc = 999 blev, lblev, bhlev, bhrlev, brsvd1, brsvd2, brlev = ( @@ -793,9 +789,5 @@ def test_unknown_lbvc(self): brsvd2=brsvd2, brlev=brlev, ) - self.assertEqual(coords_and_dims, []) - self.assertEqual(factories, []) - - -if __name__ == "__main__": - tests.main() + assert coords_and_dims == [] + assert factories == [] diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py index 176d0a38a1..496b82c6c8 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py @@ -4,16 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.pp_load_rules._dim_or_aux`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip +import pytest from iris.coords import AuxCoord, DimCoord from iris.fileformats.pp_load_rules import _dim_or_aux -class Test(tests.IrisTest): - def setUp(self): +class Test: + @pytest.fixture(autouse=True) + def _setup(self): self.mono = list(range(5)) self.non_mono = [0, 1, 3, 2, 4] self.std_name = "depth" @@ -33,7 +32,7 @@ def test_dim_monotonic(self): units=self.units, attributes=self.attr, ) - self.assertEqual(result, expected) + assert result == expected def test_dim_non_monotonic(self): result = _dim_or_aux( @@ -50,8 +49,4 @@ def test_dim_non_monotonic(self): units=self.units, attributes=attr, ) - self.assertEqual(result, expected) - - -if __name__ == "__main__": - tests.main() + assert result == expected diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py index f2f19d9bb1..826156dade 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py @@ -7,15 +7,13 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import cf_units from cf_units import Unit from cftime import datetime as nc_datetime +import pytest from iris.fileformats.pp_load_rules import _epoch_date_hours as epoch_hours_call +from iris.tests._shared_utils import assert_array_all_close # # Run tests for each of the possible calendars from PPfield.calendar(). @@ -24,30 +22,31 @@ # -class TestEpochHours__standard(tests.IrisTest): - def setUp(self): +class TestEpochHours__standard: + @pytest.fixture(autouse=True) + def _setup(self): self.calendar = cf_units.CALENDAR_STANDARD self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) def test_1970_1_1(self): test_date = nc_datetime(1970, 1, 1, calendar=self.calendar) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, 0.0) + assert result == 0.0 def test_ymd_1_1_1(self): test_date = nc_datetime(1, 1, 1, calendar=self.calendar) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17259936.0) + assert result == -17259936.0 def test_year_0(self): test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17268720.0) + assert result == -17268720.0 def test_ymd_0_0_0(self): test_date = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17269488.0) + assert result == -17269488.0 def test_ymd_0_preserves_timeofday(self): hrs, mins, secs, usecs = (7, 13, 24, 335772) @@ -69,64 +68,66 @@ def test_ymd_0_preserves_timeofday(self): # NOTE: the calculation is only accurate to approx +/- 0.5 seconds # in such a large number of hours -- even 0.1 seconds is too fine. 
absolute_tolerance = 0.5 / 3600 - self.assertArrayAllClose( + assert_array_all_close( result, -17269488.0 + hours_in_day, rtol=0, atol=absolute_tolerance ) -class TestEpochHours__360day(tests.IrisTest): - def setUp(self): +class TestEpochHours__360day: + @pytest.fixture(autouse=True) + def _setup(self): self.calendar = cf_units.CALENDAR_360_DAY self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) def test_1970_1_1(self): test_date = nc_datetime(1970, 1, 1, calendar=self.calendar) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, 0.0) + assert result == 0.0 def test_ymd_1_1_1(self): test_date = nc_datetime(1, 1, 1, calendar=self.calendar) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17012160.0) + assert result == -17012160.0 def test_year_0(self): test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17020800.0) + assert result == -17020800.0 def test_ymd_0_0_0(self): test_date = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17021544.0) + assert result == -17021544.0 -class TestEpochHours__365day(tests.IrisTest): - def setUp(self): +class TestEpochHours__365day: + @pytest.fixture(autouse=True) + def _setup(self): self.calendar = cf_units.CALENDAR_365_DAY self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) def test_1970_1_1(self): test_date = nc_datetime(1970, 1, 1, calendar=self.calendar) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, 0.0) + assert result == 0.0 def test_ymd_1_1_1(self): test_date = nc_datetime(1, 1, 1, calendar=self.calendar) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17248440.0) + assert result == -17248440.0 def test_year_0(self): test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17257200.0) + assert result == -17257200.0 def test_ymd_0_0_0(self): test_date = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17257968.0) + assert result == -17257968.0 -class TestEpochHours__invalid_calendar(tests.IrisTest): +class TestEpochHours__invalid_calendar: def test_bad_calendar(self): self.calendar = cf_units.CALENDAR_ALL_LEAP # Setup a unit with an unrecognised calendar @@ -134,9 +135,5 @@ def test_bad_calendar(self): # Test against a date with year=0, which requires calendar correction. test_date = nc_datetime(0, 1, 1, calendar=self.calendar, has_year_zero=True) # Check that this causes an error. - with self.assertRaisesRegex(ValueError, "unrecognised calendar"): + with pytest.raises(ValueError, match="unrecognised calendar"): epoch_hours_call(hrs_unit, test_date) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py index 65c6bc8442..c721e82754 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py @@ -4,23 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from iris.fileformats.pp_load_rules import _model_level_number -class Test_9999(tests.IrisTest): +class Test_9999: def test(self): - self.assertEqual(_model_level_number(9999), 0) + assert _model_level_number(9999) == 0 -class Test_lblev(tests.IrisTest): +class Test_lblev: def test(self): for val in range(9999): - self.assertEqual(_model_level_number(val), val) - - -if __name__ == "__main__": - tests.main() + assert _model_level_number(val) == val diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py index 6dfc6189bb..72e59963e7 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py @@ -7,64 +7,61 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats.pp_load_rules import _reduce_points_and_bounds +from iris.tests._shared_utils import assert_array_equal -class Test(tests.IrisTest): +class Test: def test_scalar(self): array = np.array(1) dims, result, bounds = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, array) - self.assertEqual(dims, None) - self.assertIsNone(bounds) + assert_array_equal(result, array) + assert dims is None + assert bounds is None def test_1d_nochange(self): array = np.array([1, 2, 3]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, array) - self.assertEqual(dims, (0,)) + assert_array_equal(result, array) + assert dims == (0,) def test_1d_collapse(self): array = np.array([1, 1, 1]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array(1)) - self.assertEqual(dims, None) + assert_array_equal(result, np.array(1)) + assert dims is None def test_2d_nochange(self): array = np.array([[1, 2, 3], [4, 5, 6]]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, array) - self.assertEqual(dims, (0, 1)) + assert_array_equal(result, array) + assert dims == (0, 1) def test_2d_collapse_dim0(self): array = np.array([[1, 2, 3], [1, 2, 3]]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array([1, 2, 3])) - self.assertEqual(dims, (1,)) + assert_array_equal(result, np.array([1, 2, 3])) + assert dims == (1,) def test_2d_collapse_dim1(self): array = np.array([[1, 1, 1], [2, 2, 2]]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array([1, 2])) - self.assertEqual(dims, (0,)) + assert_array_equal(result, np.array([1, 2])) + assert dims == (0,) def test_2d_collapse_both(self): array = np.array([[3, 3, 3], [3, 3, 3]]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array(3)) - self.assertEqual(dims, None) + assert_array_equal(result, np.array(3)) + assert dims is None def test_3d(self): array = np.array([[[3, 3, 3], [4, 4, 4]], [[3, 3, 3], [4, 4, 4]]]) dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array([3, 4])) - self.assertEqual(dims, (1,)) + assert_array_equal(result, np.array([3, 4])) + assert dims == (1,) def 
test_bounds_collapse(self): points = np.array([1, 1, 1]) @@ -72,9 +69,9 @@ def test_bounds_collapse(self): result_dims, result_pts, result_bds = _reduce_points_and_bounds( points, (bounds[..., 0], bounds[..., 1]) ) - self.assertArrayEqual(result_pts, np.array(1)) - self.assertArrayEqual(result_bds, np.array([0, 2])) - self.assertEqual(result_dims, None) + assert_array_equal(result_pts, np.array(1)) + assert_array_equal(result_bds, np.array([0, 2])) + assert result_dims is None def test_bounds_no_collapse(self): points = np.array([1, 2, 3]) @@ -82,10 +79,6 @@ def test_bounds_no_collapse(self): result_dims, result_pts, result_bds = _reduce_points_and_bounds( points, (bounds[..., 0], bounds[..., 1]) ) - self.assertArrayEqual(result_pts, points) - self.assertArrayEqual(result_bds, bounds) - self.assertEqual(result_dims, (0,)) - - -if __name__ == "__main__": - tests.main() + assert_array_equal(result_pts, points) + assert_array_equal(result_bds, bounds) + assert result_dims == (0,) diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py index 69ff56391e..0bb8d55aae 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py @@ -7,26 +7,24 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.fileformats.pp_load_rules import _reshape_vector_args +from iris.tests._shared_utils import assert_array_equal -class TestEmpty(tests.IrisTest): +class TestEmpty: def test(self): result = _reshape_vector_args([]) - self.assertEqual(result, []) + assert result == [] -class TestSingleArg(tests.IrisTest): +class TestSingleArg: def _check(self, result, expected): - self.assertEqual(len(result), len(expected)) + assert len(result) == len(expected) for result_arr, expected_arr in zip(result, expected): - self.assertArrayEqual(result_arr, expected_arr) + assert_array_equal(result_arr, expected_arr) def test_nochange(self): points = np.array([[1, 2, 3], [4, 5, 6]]) @@ -36,7 +34,7 @@ def test_nochange(self): def test_bad_dimensions(self): points = np.array([[1, 2, 3], [4, 5, 6]]) - with self.assertRaisesRegex(ValueError, "Length"): + with pytest.raises(ValueError, match="Length"): _reshape_vector_args([(points, (0, 1, 2))]) def test_scalar(self): @@ -64,11 +62,11 @@ def test_extend(self): self._check(result, expected) -class TestMultipleArgs(tests.IrisTest): +class TestMultipleArgs: def _check(self, result, expected): - self.assertEqual(len(result), len(expected)) + assert len(result) == len(expected) for result_arr, expected_arr in zip(result, expected): - self.assertArrayEqual(result_arr, expected_arr) + assert_array_equal(result_arr, expected_arr) def test_nochange(self): a1 = np.array([[1, 2, 3], [4, 5, 6]]) @@ -131,7 +129,3 @@ def test_triple(self): a3.reshape(1, 1, 1), ] self._check(result, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py index 476ecbc8ae..d9e1e74e00 100644 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py +++ b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py @@ -4,10 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.pp_load_rules.convert`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from types import MethodType from unittest import mock @@ -17,10 +13,41 @@ from iris.fileformats.pp import STASH, PPField3, SplittableInt from iris.fileformats.pp_load_rules import convert -import iris.tests.unit.fileformats +from iris.tests._shared_utils import assert_array_equal from iris.util import guess_coord_axis +def assert_test_for_coord( + field, convert, coord_predicate, expected_points, expected_bounds +): + ( + factories, + references, + standard_name, + long_name, + units, + attributes, + cell_methods, + dim_coords_and_dims, + aux_coords_and_dims, + ) = convert(field) + + # Check for one and only one matching coordinate. + coords_and_dims = dim_coords_and_dims + aux_coords_and_dims + matching_coords = [coord for coord, _ in coords_and_dims if coord_predicate(coord)] + assert len(matching_coords) == 1, str(matching_coords) + coord = matching_coords[0] + + # Check points and bounds. + if expected_points is not None: + assert_array_equal(coord.points, expected_points) + + if expected_bounds is None: + assert coord.bounds is None + else: + assert_array_equal(coord.bounds, expected_bounds) + + def _mock_field(**kwargs): # Generate a mock field, but ensure T1 and T2 viable for rules. field = mock.MagicMock( @@ -31,7 +58,7 @@ def _mock_field(**kwargs): return field -class TestLBCODE(iris.tests.unit.fileformats.TestField): +class TestLBCODE: @staticmethod def _is_cross_section_height_coord(coord): return ( @@ -45,7 +72,7 @@ def test_cross_section_height_bdy_zero(self): points = np.array([10, 20, 30, 40]) bounds = np.array([[0, 15], [15, 25], [25, 35], [35, 45]]) field = _mock_field(lbcode=lbcode, bdy=0, y=points, y_bounds=bounds) - self._test_for_coord( + assert_test_for_coord( field, convert, TestLBCODE._is_cross_section_height_coord, @@ -61,7 +88,7 @@ def test_cross_section_height_bdy_bmdi(self): field = _mock_field( lbcode=lbcode, bdy=bmdi, bmdi=bmdi, y=points, y_bounds=bounds ) - self._test_for_coord( + assert_test_for_coord( field, convert, TestLBCODE._is_cross_section_height_coord, @@ -70,7 +97,7 @@ def test_cross_section_height_bdy_bmdi(self): ) -class TestLBVC(iris.tests.unit.fileformats.TestField): +class TestLBVC: @staticmethod def _is_potm_level_coord(coord): return ( @@ -113,7 +140,7 @@ def _is_soil_depth_coord(coord): def test_soil_levels(self): level = 1234 field = _mock_field(lbvc=6, lblev=level, brsvd=[0, 0], brlev=0) - self._test_for_coord( + assert_test_for_coord( field, convert, self._is_soil_model_level_number_coord, @@ -124,7 +151,7 @@ def test_soil_levels(self): def test_soil_depth(self): lower, point, upper = 1.2, 3.4, 5.6 field = _mock_field(lbvc=6, blev=point, brsvd=[lower, 0], brlev=upper) - self._test_for_coord( + assert_test_for_coord( field, convert, self._is_soil_depth_coord, @@ -143,7 +170,7 @@ def test_hybrid_pressure_model_level_number(self): bhrlev=45, brsvd=[17, 40], ) - self._test_for_coord( + assert_test_for_coord( field, convert, TestLBVC._is_model_level_number_coord, @@ -164,7 +191,7 @@ def test_hybrid_pressure_delta(self): bhrlev=delta_lower_bound, brsvd=[17, delta_upper_bound], ) - self._test_for_coord( + assert_test_for_coord( field, convert, TestLBVC._is_level_pressure_coord, @@ -185,7 +212,7 @@ def test_hybrid_pressure_sigma(self): bhrlev=11, brsvd=[sigma_upper_bound, 13], ) - self._test_for_coord( + assert_test_for_coord( field, 
convert, TestLBVC._is_sigma_coord, @@ -196,7 +223,7 @@ def test_hybrid_pressure_sigma(self): def test_potential_temperature_levels(self): potm_value = 27.32 field = _mock_field(lbvc=19, blev=potm_value) - self._test_for_coord( + assert_test_for_coord( field, convert, TestLBVC._is_potm_level_coord, @@ -205,7 +232,7 @@ def test_potential_temperature_levels(self): ) -class TestLBTIM(iris.tests.unit.fileformats.TestField): +class TestLBTIM: def test_365_calendar(self): f = mock.MagicMock( lbtim=SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}), @@ -238,10 +265,10 @@ def is_t_coord(coord_and_dims): return coord.standard_name == "time" coords_and_dims = list(filter(is_t_coord, aux_coords_and_dims)) - self.assertEqual(len(coords_and_dims), 1) + assert len(coords_and_dims) == 1 coord, dims = coords_and_dims[0] - self.assertEqual(guess_coord_axis(coord), "T") - self.assertEqual(coord.units.calendar, "365_day") + assert guess_coord_axis(coord) == "T" + assert coord.units.calendar == "365_day" def base_field(self): field = PPField3(header=mock.MagicMock()) @@ -278,7 +305,7 @@ def test_time_mean_ib2(self): field.lbyrd, field.lbmond, field.lbdatd = 1970, 1, 2 field.lbhrd, field.lbmind, field.lbsecd = 15, 0, 0 - self._test_for_coord( + assert_test_for_coord( field, convert, self.is_forecast_period, @@ -286,7 +313,7 @@ def test_time_mean_ib2(self): expected_bounds=[[6, 9]], ) - self._test_for_coord( + assert_test_for_coord( field, convert, self.is_time, @@ -306,7 +333,7 @@ def test_time_mean_ib3(self): field.lbyrd, field.lbmond, field.lbdatd = 1971, 1, 2 field.lbhrd, field.lbmind, field.lbsecd = 15, 0, 0 - self._test_for_coord( + assert_test_for_coord( field, convert, self.is_forecast_period, @@ -314,7 +341,7 @@ def test_time_mean_ib3(self): expected_bounds=[[36 - 30, lbft]], ) - self._test_for_coord( + assert_test_for_coord( field, convert, self.is_time, @@ -323,7 +350,7 @@ def test_time_mean_ib3(self): ) -class TestLBRSVD(iris.tests.unit.fileformats.TestField): +class TestLBRSVD: @staticmethod def _is_realization(coord): return coord.standard_name == "realization" and coord.units == "1" @@ -334,7 +361,7 @@ def test_realization(self): points = np.array([71]) bounds = None field = _mock_field(lbrsvd=lbrsvd) - self._test_for_coord( + assert_test_for_coord( field, convert, TestLBRSVD._is_realization, @@ -343,7 +370,7 @@ def test_realization(self): ) -class TestLBSRCE(iris.tests.IrisTest): +class TestLBSRCE: def check_um_source_attrs(self, lbsrce, source_str=None, um_version_str=None): field = _mock_field(lbsrce=lbsrce) ( @@ -358,13 +385,13 @@ def check_um_source_attrs(self, lbsrce, source_str=None, um_version_str=None): aux_coords_and_dims, ) = convert(field) if source_str is not None: - self.assertEqual(attributes["source"], source_str) + assert attributes["source"] == source_str else: - self.assertNotIn("source", attributes) + assert "source" not in attributes if um_version_str is not None: - self.assertEqual(attributes["um_version"], um_version_str) + assert attributes["um_version"] == um_version_str else: - self.assertNotIn("um_version", attributes) + assert "um_version" not in attributes def test_none(self): self.check_um_source_attrs(lbsrce=8123, source_str=None, um_version_str=None) @@ -384,7 +411,7 @@ def test_um_version(self): ) -class Test_STASH_CF(iris.tests.unit.fileformats.TestField): +class Test_STASH_CF: def test_stash_cf_air_temp(self): lbuser = [1, 0, 0, 16203, 0, 0, 1] lbfc = 16 @@ -401,8 +428,8 @@ def test_stash_cf_air_temp(self): dim_coords_and_dims, aux_coords_and_dims, ) = convert(field) 
- self.assertEqual(standard_name, "air_temperature") - self.assertEqual(units, "K") + assert standard_name == "air_temperature" + assert units == "K" def test_no_std_name(self): lbuser = [1, 0, 0, 0, 0, 0, 0] @@ -420,11 +447,11 @@ def test_no_std_name(self): dim_coords_and_dims, aux_coords_and_dims, ) = convert(field) - self.assertIsNone(standard_name) - self.assertIsNone(units) + assert standard_name is None + assert units is None -class Test_LBFC_CF(iris.tests.unit.fileformats.TestField): +class Test_LBFC_CF: def test_fc_cf_air_temp(self): lbuser = [1, 0, 0, 0, 0, 0, 0] lbfc = 16 @@ -441,9 +468,5 @@ def test_fc_cf_air_temp(self): dim_coords_and_dims, aux_coords_and_dims, ) = convert(field) - self.assertEqual(standard_name, "air_temperature") - self.assertEqual(units, "K") - - -if __name__ == "__main__": - tests.main() + assert standard_name == "air_temperature" + assert units == "K" diff --git a/lib/iris/tests/unit/fileformats/rules/test_Loader.py b/lib/iris/tests/unit/fileformats/rules/test_Loader.py index fafa018d3a..08e0764a45 100644 --- a/lib/iris/tests/unit/fileformats/rules/test_Loader.py +++ b/lib/iris/tests/unit/fileformats/rules/test_Loader.py @@ -4,40 +4,29 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformats.rules.Loader`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.rules import Loader +from iris.tests._shared_utils import assert_no_warnings_regexp -class Test___init__(tests.IrisTest): - def test_normal(self): - with mock.patch("warnings.warn") as warn: +class Test___init__: + def test_normal(self, mocker): + with assert_no_warnings_regexp(): loader = Loader( - mock.sentinel.GEN_FUNC, - mock.sentinel.GEN_FUNC_KWARGS, - mock.sentinel.CONVERTER, + mocker.sentinel.GEN_FUNC, + mocker.sentinel.GEN_FUNC_KWARGS, + mocker.sentinel.CONVERTER, ) - self.assertEqual(warn.call_count, 0) - self.assertIs(loader.field_generator, mock.sentinel.GEN_FUNC) - self.assertIs(loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS) - self.assertIs(loader.converter, mock.sentinel.CONVERTER) + assert loader.field_generator is mocker.sentinel.GEN_FUNC + assert loader.field_generator_kwargs is mocker.sentinel.GEN_FUNC_KWARGS + assert loader.converter is mocker.sentinel.CONVERTER - def test_normal_with_explicit_none(self): - with mock.patch("warnings.warn") as warn: + def test_normal_with_explicit_none(self, mocker): + with assert_no_warnings_regexp(): loader = Loader( - mock.sentinel.GEN_FUNC, - mock.sentinel.GEN_FUNC_KWARGS, - mock.sentinel.CONVERTER, + mocker.sentinel.GEN_FUNC, + mocker.sentinel.GEN_FUNC_KWARGS, + mocker.sentinel.CONVERTER, ) - self.assertEqual(warn.call_count, 0) - self.assertIs(loader.field_generator, mock.sentinel.GEN_FUNC) - self.assertIs(loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS) - self.assertIs(loader.converter, mock.sentinel.CONVERTER) - - -if __name__ == "__main__": - tests.main() + assert loader.field_generator is mocker.sentinel.GEN_FUNC + assert loader.field_generator_kwargs is mocker.sentinel.GEN_FUNC_KWARGS + assert loader.converter is mocker.sentinel.CONVERTER diff --git a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py index 91862658e5..c14a5df705 100644 --- a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py +++ 
b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py @@ -4,19 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.rules._make_cube`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from unittest import mock -import warnings import numpy as np +import pytest from iris.fileformats.rules import ConversionMetadata, _make_cube -class Test(tests.IrisTest): +class Test: def test_invalid_units(self): # Mock converter() function that returns an invalid # units string amongst the collection of other elements. @@ -46,20 +42,12 @@ def test_invalid_units(self): field = mock.Mock( core_data=lambda: data, bmdi=9999.0, realised_dtype=data.dtype ) - with warnings.catch_warnings(record=True) as warn: - warnings.simplefilter("always") + + exp_emsg = "invalid units {!r}".format(units) + with pytest.warns(match=exp_emsg): cube, factories, references = _make_cube(field, converter) # Check attributes dictionary is correctly populated. expected_attributes = attributes.copy() expected_attributes["invalid_units"] = units - self.assertEqual(cube.attributes, expected_attributes) - - # Check warning was raised. - self.assertEqual(len(warn), 1) - exp_emsg = "invalid units {!r}".format(units) - self.assertRegex(str(warn[0]), exp_emsg) - - -if __name__ == "__main__": - tests.main() + assert cube.attributes == expected_attributes diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/rules/test_rules.py similarity index 81% rename from lib/iris/tests/unit/fileformats/test_rules.py rename to lib/iris/tests/unit/fileformats/rules/test_rules.py index d39b6a997d..df3c769a70 100644 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ b/lib/iris/tests/unit/fileformats/rules/test_rules.py @@ -4,14 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test iris.fileformats.rules.py - metadata translation rules.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import types from unittest import mock import numpy as np +import pytest from iris.aux_factory import HybridHeightFactory from iris.coords import CellMethod @@ -26,30 +23,31 @@ load_cubes, scalar_cell_method, ) +from iris.tests._shared_utils import skip_data import iris.tests.stock as stock -class TestConcreteReferenceTarget(tests.IrisTest): +class TestConcreteReferenceTarget: def test_attributes(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): target = ConcreteReferenceTarget() target = ConcreteReferenceTarget("foo") - self.assertEqual(target.name, "foo") - self.assertIsNone(target.transform) + assert target.name == "foo" + assert target.transform is None def transform(_): return _ target = ConcreteReferenceTarget("foo", transform) - self.assertEqual(target.name, "foo") - self.assertIs(target.transform, transform) + assert target.name == "foo" + assert target.transform is transform def test_single_cube_no_transform(self): target = ConcreteReferenceTarget("foo") src = stock.simple_2d() target.add_cube(src) - self.assertIs(target.as_cube(), src) + assert target.as_cube() is src def test_single_cube_with_transform(self): def transform(cube): @@ -59,22 +57,22 @@ def transform(cube): src = stock.simple_2d() target.add_cube(src) dest = target.as_cube() - self.assertEqual(dest.long_name, "wibble") - self.assertNotEqual(dest, src) + assert dest.long_name == "wibble" + assert dest != src dest.long_name = src.long_name - self.assertEqual(dest, src) + assert dest == src - @tests.skip_data + @skip_data def test_multiple_cubes_no_transform(self): target = ConcreteReferenceTarget("foo") src = stock.realistic_4d() for i in range(src.shape[0]): target.add_cube(src[i]) dest = target.as_cube() - self.assertIsNot(dest, src) - self.assertEqual(dest, src) + assert dest is not src + assert dest == src - @tests.skip_data + @skip_data def test_multiple_cubes_with_transform(self): def transform(cube): return {"long_name": "wibble"} @@ -84,13 +82,13 @@ def transform(cube): for i in range(src.shape[0]): target.add_cube(src[i]) dest = target.as_cube() - self.assertEqual(dest.long_name, "wibble") - self.assertNotEqual(dest, src) + assert dest.long_name == "wibble" + assert dest != src dest.long_name = src.long_name - self.assertEqual(dest, src) + assert dest == src -class TestLoadCubes(tests.IrisTest): +class TestLoadCubes: def test_simple_factory(self): # Test the creation process for a factory definition which only # uses simple dict arguments. @@ -124,7 +122,7 @@ def converter(field): cubes = load_cubes(["fake_filename"], None, fake_loader) # Check the result is a generator with a single entry. - self.assertIsInstance(cubes, types.GeneratorType) + assert isinstance(cubes, types.GeneratorType) try: # Suppress the normal Cube.coord() and Cube.add_aux_factory() # methods. @@ -139,15 +137,15 @@ def converter(field): finally: Cube.coord = coord_method Cube.add_aux_factory = add_aux_factory_method - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 # Check the "cube" has an "aux_factory" added, which itself # must have been created with the correct arguments. 
- self.assertTrue(hasattr(cubes[0], "fake_aux_factory")) - self.assertIs(cubes[0].fake_aux_factory, aux_factory) - self.assertTrue(hasattr(aux_factory, "fake_args")) - self.assertEqual(aux_factory.fake_args, ({"name": "foo"},)) + assert hasattr(cubes[0], "fake_aux_factory") + assert cubes[0].fake_aux_factory is aux_factory + assert hasattr(aux_factory, "fake_args") + assert aux_factory.fake_args == ({"name": "foo"},) - @tests.skip_data + @skip_data def test_cross_reference(self): # Test the creation process for a factory definition which uses # a cross-reference. @@ -219,42 +217,38 @@ def converter(field): cubes = load_cubes(["fake_filename"], None, fake_loader) # Check the result is a generator containing two Cubes. - self.assertIsInstance(cubes, types.GeneratorType) + assert isinstance(cubes, types.GeneratorType) cubes = list(cubes) - self.assertEqual(len(cubes), 2) + assert len(cubes) == 2 # Check the "cube" has an "aux_factory" added, which itself # must have been created with the correct arguments. - self.assertEqual(len(cubes[1].aux_factories), 1) - self.assertEqual(len(cubes[1].coords("surface_altitude")), 1) + assert len(cubes[1].aux_factories) == 1 + assert len(cubes[1].coords("surface_altitude")) == 1 -class Test_scalar_cell_method(tests.IrisTest): +class Test_scalar_cell_method: """Tests for iris.fileformats.rules.scalar_cell_method() function.""" - def setUp(self): + def setup_method(self): self.cube = stock.simple_2d() self.cm = CellMethod("mean", "foo", "1 hour") self.cube.cell_methods = (self.cm,) def test_cell_method_found(self): actual = scalar_cell_method(self.cube, "mean", "foo") - self.assertEqual(actual, self.cm) + assert actual == self.cm def test_method_different(self): actual = scalar_cell_method(self.cube, "average", "foo") - self.assertIsNone(actual) + assert actual is None def test_coord_name_different(self): actual = scalar_cell_method(self.cube, "average", "bar") - self.assertIsNone(actual) + assert actual is None def test_double_coord_fails(self): self.cube.cell_methods = ( CellMethod("mean", ("foo", "bar"), ("1 hour", "1 hour")), ) actual = scalar_cell_method(self.cube, "mean", "foo") - self.assertIsNone(actual) - - -if __name__ == "__main__": - tests.main() + assert actual is None diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index 6012f1fce8..bf23b39a5c 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -7,16 +7,14 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.fileformats._structured_array_identification import ( ArrayStructure, _UnstructuredArrayException, ) +from iris.tests._shared_utils import assert_array_equal def construct_nd(sub_array, sub_dim, shape): @@ -28,136 +26,136 @@ def construct_nd(sub_array, sub_dim, shape): return sub_array.reshape(sub_shape) * np.ones(shape) -class TestArrayStructure_from_array(tests.IrisTest): +class TestArrayStructure_from_array: def struct_from_arr(self, nd_array): return ArrayStructure.from_array(nd_array.flatten()) def test_1d_len_0(self): a = np.arange(0) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, a)) + assert self.struct_from_arr(a) == ArrayStructure(1, a) def test_1d_len_1(self): a = np.arange(1) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, a)) + assert self.struct_from_arr(a) == ArrayStructure(1, a) def test_1d(self): a = np.array([-1, 3, 1, 2]) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, a)) + assert self.struct_from_arr(a) == ArrayStructure(1, a) def test_1d_ones(self): a = np.ones(10) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, [1])) + assert self.struct_from_arr(a) == ArrayStructure(1, [1]) def test_1d_range(self): a = np.arange(6) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, list(range(6)))) + assert self.struct_from_arr(a) == ArrayStructure(1, list(range(6))) def test_3d_ones(self): a = np.ones([10, 2, 1]) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, [1])) + assert self.struct_from_arr(a) == ArrayStructure(1, [1]) def test_1d_over_2d_first_dim_manual(self): sub = np.array([10, 10, 20, 20]) - self.assertEqual(self.struct_from_arr(sub), ArrayStructure(2, [10, 20])) + assert self.struct_from_arr(sub) == ArrayStructure(2, [10, 20]) def test_3d_first_dimension(self): flattened = np.array([1, 1, 1, 2, 2, 2]) - self.assertEqual( - ArrayStructure.from_array(flattened), ArrayStructure(3, [1, 2]) - ) + assert ArrayStructure.from_array(flattened) == ArrayStructure(3, [1, 2]) def test_1d_over_2d_first_dim(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 0, (4, 2)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(2, sub)) + assert self.struct_from_arr(a) == ArrayStructure(2, sub) def test_1d_over_2d_second_dim(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 1, (2, 4)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, sub)) + assert self.struct_from_arr(a) == ArrayStructure(1, sub) def test_1d_over_3d_first_dim(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 0, (4, 2, 3)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(6, sub)) + assert self.struct_from_arr(a) == ArrayStructure(6, sub) def test_1d_over_3d_second_dim(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 1, (2, 4, 3)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(3, sub)) + assert self.struct_from_arr(a) == ArrayStructure(3, sub) def test_1d_over_3d_third_dim(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 2, (3, 2, 4)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, sub)) + assert self.struct_from_arr(a) == ArrayStructure(1, sub) def test_irregular_3d(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 2, (3, 2, 4)) a[0, 0, 0] = 5 - self.assertEqual(self.struct_from_arr(a), None) + assert self.struct_from_arr(a) is None def test_repeated_3d(self): sub = np.array([-1, 3, 1, 2]) a = construct_nd(sub, 2, (3, 2, 4)) a[:, 0, 0] = 1 - 
self.assertEqual(self.struct_from_arr(a), None)
+        assert self.struct_from_arr(a) is None
 
     def test_rolled_3d(self):
         # Shift the 3D array on by one, making the array 1d.
         sub = np.arange(4)
         a = construct_nd(sub, 0, (4, 2, 3))
         a = np.roll(a.flatten(), 1)
-        self.assertEqual(self.struct_from_arr(a), None)
+        assert self.struct_from_arr(a) is None
 
     def test_len_1_3d(self):
         # Setup a case which triggers an IndexError when identifying
         # the stride, but the result should still be correct.
         sub = np.arange(2)
         a = construct_nd(sub, 1, (1, 1, 1))
-        self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, sub))
+        assert self.struct_from_arr(a) == ArrayStructure(1, sub)
 
     def test_not_an_array(self):
         # Support lists as an argument.
-        self.assertEqual(
-            ArrayStructure.from_array([1, 2, 3]), ArrayStructure(1, [1, 2, 3])
-        )
+        assert ArrayStructure.from_array([1, 2, 3]) == ArrayStructure(1, [1, 2, 3])
 
     def test_multi_dim_array(self):
-        with self.assertRaises(ValueError):
+        with pytest.raises(ValueError):
             ArrayStructure.from_array(np.arange(12).reshape(3, 4))
 
 
-class nd_array_and_dims_cases:
+class TestNdarrayAndDimsCases:
     """Defines the test functionality for nd_array_and_dims.
 
-    This class isn't actually the test case - see the C order and F
-    order subclasses for those.
+    The array order ("c" or "f") is supplied by the autouse "_order"
+    fixture below, so pytest collects and runs this class directly.
 
     """
 
+    @pytest.fixture(params=["c", "f"], ids=["c_order", "f_order"], autouse=True)
+    def _order(self, request):
+        self.order = request.param
+
     def test_scalar_len1_first_dim(self):
         struct = ArrayStructure(1, [1])
         orig = np.array([1, 1, 1])
         array, dims = struct.nd_array_and_dims(orig, (1, 3), order=self.order)
-        self.assertArrayEqual(array, [1])
-        self.assertEqual(dims, ())
+        assert_array_equal(array, [1])
+        assert dims == ()
 
     def test_scalar_non_len1_first_dim(self):
         struct = ArrayStructure(1, [1])
         orig = np.array([1, 1, 1])
         array, dims = struct.nd_array_and_dims(orig, (3, 1), order=self.order)
-        self.assertArrayEqual(array, [1])
-        self.assertEqual(dims, ())
+        assert_array_equal(array, [1])
+        assert dims == ()
 
     def test_single_vector(self):
         orig = construct_nd(np.array([1, 2]), 0, (2, 1, 3))
         flattened = orig.flatten(order=self.order)
         struct = ArrayStructure.from_array(flattened)
         array, dims = struct.nd_array_and_dims(flattened, (2, 1, 3), order=self.order)
-        self.assertArrayEqual(array, [1, 2])
-        self.assertEqual(dims, (0,))
+        assert_array_equal(array, [1, 2])
+        assert dims == (0,)
 
     def test_single_vector_3rd_dim(self):
         orig = construct_nd(np.array([1, 2, 3]), 2, (4, 1, 3))
@@ -165,8 +163,8 @@ def test_single_vector_3rd_dim(self):
         struct = ArrayStructure.from_array(flattened)
 
         array, dims = struct.nd_array_and_dims(flattened, (4, 1, 3), order=self.order)
-        self.assertArrayEqual(array, [1, 2, 3])
-        self.assertEqual(dims, (2,))
+        assert_array_equal(array, [1, 2, 3])
+        assert dims == (2,)
 
     def test_orig_array_and_target_shape_inconsistent(self):
         # An array structure which has a length which is a product
@@ -175,7 +173,7 @@ def test_orig_array_and_target_shape_inconsistent(self):
         orig = np.array([1, 1, 2, 2, 3, 3])
 
         msg = "Original array and target shape do not match up."
- with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): struct.nd_array_and_dims(orig, (2, 3, 2), order=self.order) def test_array_bigger_than_expected(self): @@ -184,7 +182,7 @@ def test_array_bigger_than_expected(self): struct = ArrayStructure(2, [1, 2, 3, 4, 5, 6]) orig = np.array([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6]) - with self.assertRaises(_UnstructuredArrayException): + with pytest.raises(_UnstructuredArrayException): struct.nd_array_and_dims(orig, (2, 3, 2), order=self.order) def test_single_vector_extra_dimension(self): @@ -199,21 +197,5 @@ def test_single_vector_extra_dimension(self): array, dims = struct.nd_array_and_dims( input_array, (3, 1, 2, 1), order=self.order ) - self.assertArrayEqual(array, [[1, 101], [2, 102]]) - self.assertEqual(dims, (2,)) - - -class TestArrayStructure_nd_array_and_dims_f_order( - tests.IrisTest, nd_array_and_dims_cases -): - order = "f" - - -class TestArrayStructure_nd_array_and_dims_c_order( - tests.IrisTest, nd_array_and_dims_cases -): - order = "c" - - -if __name__ == "__main__": - tests.main() + assert_array_equal(array, [[1, 101], [2, 102]]) + assert dims == (2,) diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index 868f37a1a8..ef2d1d2e75 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -12,11 +12,13 @@ import iris.tests as tests # isort:skip import numpy as np +import pytest from iris.fileformats._structured_array_identification import ( ArrayStructure, GroupStructure, ) +from iris.tests._shared_utils import assert_array_equal def regular_array_structures(shape, names="abcdefg"): @@ -34,7 +36,7 @@ class TestGroupStructure_from_component_arrays(tests.IrisTest): def test_different_sizes(self): arrays = {"a": np.arange(6), "b": np.arange(5)} msg = "All array elements must have the same size." - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): GroupStructure.from_component_arrays(arrays) def test_structure_creation(self): @@ -45,8 +47,8 @@ def test_structure_creation(self): grp = GroupStructure.from_component_arrays({"a": array}) - self.assertEqual(grp.length, 6) - self.assertEqual(grp._cmpt_structure, expected_structure) + assert grp.length == 6 + assert grp._cmpt_structure == expected_structure class TestGroupStructure_possible_structures(tests.IrisTest): @@ -66,7 +68,7 @@ def test_simple_3d_structure(self): ("c", array_structures["c"]), ], ) - self.assertEqual(structure.possible_structures(), expected) + assert structure.possible_structures() == expected def assert_potentials(self, length, array_structures, expected): structure = GroupStructure(length, array_structures, array_order="f") @@ -74,7 +76,7 @@ def assert_potentials(self, length, array_structures, expected): names = [ [name for (name, _) in allowed_structure] for allowed_structure in allowed ] - self.assertEqual(names, expected) + assert names == expected def test_multiple_potentials(self): # More than one potential dimension for dim 1. 
@@ -116,8 +118,8 @@ class TestGroupStructure_build_arrays(tests.IrisTest): def assert_built_array(self, name, result, expected): ex_arr, ex_dims = expected re_arr, re_dims = result[name] - self.assertEqual(ex_dims, re_dims) - self.assertArrayEqual(ex_arr, re_arr) + assert ex_dims == re_dims + assert_array_equal(ex_arr, re_arr) def test_build_arrays_regular_f_order(self): # Construct simple orthogonal 1d array structures, adding a trailing @@ -181,7 +183,3 @@ def test_structured_array_not_applicable(self): }, ) self.assert_built_array("d", r, (expected, (0, 1, 2))) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py index 9b9929a156..1562a28e82 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py @@ -4,14 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.as_concrete_data`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np import numpy.ma as ma from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data +from iris.tests._shared_utils import assert_array_equal, assert_masked_array_equal class MyProxy: @@ -25,58 +22,54 @@ def __getitem__(self, keys): return self.a[keys] -class Test_as_concrete_data(tests.IrisTest): +class Test_as_concrete_data: def test_concrete_input_data(self): data = np.arange(24).reshape((4, 6)) result = as_concrete_data(data) - self.assertIs(data, result) - self.assertFalse(is_lazy_data(result)) + assert data is result + assert not is_lazy_data(result) def test_concrete_masked_input_data(self): data = ma.masked_array([10, 12, 8, 2], mask=[True, True, False, True]) result = as_concrete_data(data) - self.assertIs(data, result) - self.assertFalse(is_lazy_data(result)) + assert data is result + assert not is_lazy_data(result) def test_lazy_data(self): data = np.arange(24).reshape((2, 12)) lazy_array = as_lazy_data(data) - self.assertTrue(is_lazy_data(lazy_array)) + assert is_lazy_data(lazy_array) result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, data) + assert not is_lazy_data(result) + assert_array_equal(result, data) def test_lazy_mask_data(self): data = np.arange(24).reshape((2, 12)) fill_value = 1234 mask_data = ma.masked_array(data, fill_value=fill_value) lazy_array = as_lazy_data(mask_data) - self.assertTrue(is_lazy_data(lazy_array)) + assert is_lazy_data(lazy_array) result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertMaskedArrayEqual(result, mask_data) - self.assertEqual(result.fill_value, fill_value) + assert not is_lazy_data(result) + assert_masked_array_equal(result, mask_data) + assert result.fill_value == fill_value def test_lazy_scalar_proxy(self): a = np.array(5) proxy = MyProxy(a) meta = np.empty((0,) * proxy.ndim, dtype=proxy.dtype) lazy_array = as_lazy_data(proxy, meta=meta) - self.assertTrue(is_lazy_data(lazy_array)) + assert is_lazy_data(lazy_array) result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertEqual(result, a) + assert not is_lazy_data(result) + assert result == a def test_lazy_scalar_proxy_masked(self): a = np.ma.masked_array(5, True) proxy = MyProxy(a) meta = np.ma.array(np.empty((0,) * proxy.ndim, dtype=proxy.dtype), 
mask=True) lazy_array = as_lazy_data(proxy, meta=meta) - self.assertTrue(is_lazy_data(lazy_array)) + assert is_lazy_data(lazy_array) result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertMaskedArrayEqual(result, a) - - -if __name__ == "__main__": - tests.main() + assert not is_lazy_data(result) + assert_masked_array_equal(result, a) diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 90166b5e78..821370ce6c 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -4,56 +4,53 @@ # See LICENSE in the root of the repository for full licensing details. """Test the function :func:`iris._lazy data.as_lazy_data`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from unittest import mock import dask.array as da import dask.config import numpy as np import numpy.ma as ma +import pytest from iris._lazy_data import _optimum_chunksize, as_lazy_data -class Test_as_lazy_data(tests.IrisTest): +class Test_as_lazy_data: def test_lazy(self): data = da.from_array(np.arange(24).reshape((2, 3, 4)), chunks="auto") result = as_lazy_data(data) - self.assertIsInstance(result, da.core.Array) + assert isinstance(result, da.core.Array) def test_real(self): data = np.arange(24).reshape((2, 3, 4)) result = as_lazy_data(data) - self.assertIsInstance(result, da.core.Array) + assert isinstance(result, da.core.Array) def test_masked(self): data = np.ma.masked_greater(np.arange(24), 10) result = as_lazy_data(data) - self.assertIsInstance(result, da.core.Array) + assert isinstance(result, da.core.Array) def test_non_default_chunks(self): data = np.arange(24) chunks = (12,) lazy_data = as_lazy_data(data, chunks=chunks) (result,) = np.unique(lazy_data.chunks) - self.assertEqual(result, 24) + assert result == 24 - def test_dask_chunking(self): + def test_dask_chunking(self, mocker): data = np.arange(24) chunks = (12,) - optimum = self.patch("iris._lazy_data._optimum_chunksize") + optimum = mocker.patch("iris._lazy_data._optimum_chunksize") optimum.return_value = chunks _ = as_lazy_data(data, chunks="auto") - self.assertFalse(optimum.called) + assert not optimum.called def test_with_masked_constant(self): masked_data = ma.masked_array([8], mask=True) masked_constant = masked_data[0] result = as_lazy_data(masked_constant) - self.assertIsInstance(result, da.core.Array) + assert isinstance(result, da.core.Array) def test_missing_meta(self): class MyProxy: @@ -61,15 +58,15 @@ class MyProxy: data = MyProxy() - with self.assertRaisesRegex( + with pytest.raises( ValueError, - r"`meta` cannot be `None` if `data` is anything other than a Numpy " + match=r"For performance reasons, `meta` cannot be `None` if `data` is anything other than a Numpy " r"or Dask array.", ): as_lazy_data(data) -class Test__optimised_chunks(tests.IrisTest): +class Test__optimised_chunks: # Stable, known chunksize for testing. FIXED_CHUNKSIZE_LIMIT = 1024 * 1024 * 64 @@ -91,7 +88,7 @@ def test_chunk_size_limiting(self): for shape, expected in given_shapes_and_resulting_chunks: chunks = _optimum_chunksize(shape, shape, limit=self.FIXED_CHUNKSIZE_LIMIT) msg = err_fmt.format(shape, chunks, expected) - self.assertEqual(chunks, expected, msg) + assert chunks == expected, msg def test_chunk_size_expanding(self): # Check the expansion of small chunks, (with a known size limit). 
@@ -109,7 +106,7 @@ def test_chunk_size_expanding(self): chunks=shape, shape=fullshape, limit=self.FIXED_CHUNKSIZE_LIMIT ) msg = err_fmt.format(fullshape, shape, chunks, expected) - self.assertEqual(chunks, expected, msg) + assert chunks == expected, msg def test_chunk_expanding_equal_division(self): # Check that expansion chooses equal chunk sizes as far as possible. @@ -147,41 +144,34 @@ def test_chunk_expanding_equal_division(self): chunks=chunks, shape=shape, limit=limit, dtype=np.dtype("b1") ) msg = err_fmt_main.format(chunks, shape, limit, result, expected_result) - self.assertEqual(result, expected_result, msg) + assert result == expected_result, msg def test_default_chunksize(self): # Check that the "ideal" chunksize is taken from the dask config. with dask.config.set({"array.chunk-size": "20b"}): chunks = _optimum_chunksize((1, 8), shape=(400, 20), dtype=np.dtype("f4")) - self.assertEqual(chunks, (1, 4)) + assert chunks == (1, 4) - def test_default_chunks_limiting(self): + def test_default_chunks_limiting(self, mocker): # Check that chunking is still controlled when no specific 'chunks' # is passed. - limitcall_patch = self.patch("iris._lazy_data._optimum_chunksize") + limitcall_patch = mocker.patch("iris._lazy_data._optimum_chunksize") test_shape = (3, 2, 4) data = self._dummydata(test_shape) as_lazy_data(data) - self.assertEqual( - limitcall_patch.call_args_list, - [ - mock.call( - list(test_shape), - shape=test_shape, - dtype=np.dtype("f4"), - dims_fixed=None, - ) - ], - ) + assert limitcall_patch.call_args_list == [ + mock.call( + list(test_shape), + shape=test_shape, + dtype=np.dtype("f4"), + dims_fixed=None, + ) + ] - def test_shapeless_data(self): + def test_shapeless_data(self, mocker): # Check that chunk optimisation is skipped if shape contains a zero. - limitcall_patch = self.patch("iris._lazy_data._optimum_chunksize") + limitcall_patch = mocker.patch("iris._lazy_data._optimum_chunksize") test_shape = (2, 1, 0, 2) data = self._dummydata(test_shape) as_lazy_data(data, chunks=test_shape) - self.assertFalse(limitcall_patch.called) - - -if __name__ == "__main__": - tests.main() + assert not limitcall_patch.called diff --git a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py index 4e304d4910..ecc969cc2c 100644 --- a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py +++ b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py @@ -4,14 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.co_realise_cubes`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np from iris._lazy_data import as_lazy_data, co_realise_cubes from iris.cube import Cube +from iris.tests._shared_utils import assert_array_all_close class ArrayAccessCounter: @@ -28,7 +25,7 @@ def __getitem__(self, keys): return self._array[keys] -class Test_co_realise_cubes(tests.IrisTest): +class Test_co_realise_cubes: def test_empty(self): # Ensure that 'no args' case does not raise an error. 
co_realise_cubes() @@ -37,8 +34,8 @@ def test_basic(self): real_data = np.arange(3.0) cube = Cube(as_lazy_data(real_data)) co_realise_cubes(cube) - self.assertFalse(cube.has_lazy_data()) - self.assertArrayAllClose(cube.core_data(), real_data) + assert not cube.has_lazy_data() + assert_array_all_close(cube.core_data(), real_data) def test_multi(self): real_data = np.arange(3.0) @@ -48,11 +45,11 @@ def test_multi(self): result_b = cube_inner + 1 co_realise_cubes(result_a, result_b) # Check that target cubes were realised. - self.assertFalse(result_a.has_lazy_data()) - self.assertFalse(result_b.has_lazy_data()) + assert not result_a.has_lazy_data() + assert not result_b.has_lazy_data() # Check that other cubes referenced remain lazy. - self.assertTrue(cube_base.has_lazy_data()) - self.assertTrue(cube_inner.has_lazy_data()) + assert cube_base.has_lazy_data() + assert cube_inner.has_lazy_data() def test_combined_access(self): wrapped_array = ArrayAccessCounter(np.arange(3.0)) @@ -74,8 +71,4 @@ def test_combined_access(self): # access with no data payload to ascertain the metadata associated with # the dask.array (this access is specific to dask 2+, # see dask.array.utils.meta_from_array). - self.assertEqual(wrapped_array.access_count, 1) - - -if __name__ == "__main__": - tests.main() + assert wrapped_array.access_count == 1 diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py index a8018c67b1..ca61460710 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py @@ -4,26 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.is_lazy_data`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import dask.array as da import numpy as np from iris._lazy_data import is_lazy_data -class Test_is_lazy_data(tests.IrisTest): +class Test_is_lazy_data: def test_lazy(self): values = np.arange(30).reshape((2, 5, 3)) lazy_array = da.from_array(values, chunks="auto") - self.assertTrue(is_lazy_data(lazy_array)) + assert is_lazy_data(lazy_array) def test_real(self): real_array = np.arange(24).reshape((2, 3, 4)) - self.assertFalse(is_lazy_data(real_array)) - - -if __name__ == "__main__": - tests.main() + assert not is_lazy_data(real_array) diff --git a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py index 651a774c4d..1600067d79 100644 --- a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py +++ b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py @@ -4,13 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.lazy_elementwise`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris._lazy_data import as_lazy_data, is_lazy_data, lazy_elementwise +from iris.tests._shared_utils import assert_array_all_close def _test_elementwise_op(array): @@ -18,32 +15,26 @@ def _test_elementwise_op(array): return array + 1 -class Test_lazy_elementwise(tests.IrisTest): +class Test_lazy_elementwise: def test_basic(self): concrete_array = np.arange(30).reshape((2, 5, 3)) lazy_array = as_lazy_data(concrete_array) wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) - self.assertTrue(is_lazy_data(wrapped)) - self.assertArrayAllClose( - wrapped.compute(), _test_elementwise_op(concrete_array) - ) + assert is_lazy_data(wrapped) + assert_array_all_close(wrapped.compute(), _test_elementwise_op(concrete_array)) def test_dtype_same(self): concrete_array = np.array([3.0], dtype=np.float16) lazy_array = as_lazy_data(concrete_array) wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) - self.assertTrue(is_lazy_data(wrapped)) - self.assertEqual(wrapped.dtype, np.float16) - self.assertEqual(wrapped.compute().dtype, np.float16) + assert is_lazy_data(wrapped) + assert wrapped.dtype == np.float16 + assert wrapped.compute().dtype == np.float16 def test_dtype_change(self): concrete_array = np.array([True, False]) lazy_array = as_lazy_data(concrete_array) wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) - self.assertTrue(is_lazy_data(wrapped)) - self.assertEqual(wrapped.dtype, np.int_) - self.assertEqual(wrapped.compute().dtype, wrapped.dtype) - - -if __name__ == "__main__": - tests.main() + assert is_lazy_data(wrapped) + assert wrapped.dtype == np.int_ + assert wrapped.compute().dtype == wrapped.dtype diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py index 7d619353ed..be25ab6c09 100644 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py @@ -4,33 +4,30 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.map_complete_blocks`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import unittest +from unittest.mock import Mock, PropertyMock import dask.array as da import numpy as np from iris._lazy_data import is_lazy_data, map_complete_blocks +from iris.tests._shared_utils import assert_array_equal def create_mock_cube(array): - cube = unittest.mock.Mock() - cube_data = unittest.mock.PropertyMock(return_value=array) + cube = Mock() + cube_data = PropertyMock(return_value=array) type(cube).data = cube_data cube.dtype = array.dtype - cube.has_lazy_data = unittest.mock.Mock(return_value=is_lazy_data(array)) - cube.lazy_data = unittest.mock.Mock(return_value=array) + cube.has_lazy_data = Mock(return_value=is_lazy_data(array)) + cube.lazy_data = Mock(return_value=array) cube.shape = array.shape # Remove compute so cube is not interpreted as dask array. 
del cube.compute return cube, cube_data -class Test_map_complete_blocks(tests.IrisTest): - def setUp(self): +class Test_map_complete_blocks: + def setup_method(self): self.array = np.arange(8).reshape(2, 4) def func(chunk): @@ -53,8 +50,8 @@ def test_non_lazy_input(self): result = map_complete_blocks( cube, self.func, dims=(1,), out_sizes=(4,), dtype=self.array.dtype ) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, self.func_result) + assert not is_lazy_data(result) + assert_array_equal(result, self.func_result) # check correct data was accessed cube.lazy_data.assert_not_called() cube_data.assert_called_once() @@ -65,8 +62,8 @@ def test_lazy_input(self): result = map_complete_blocks( cube, self.func, dims=(1,), out_sizes=(4,), dtype=lazy_array.dtype ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), self.func_result) + assert is_lazy_data(result) + assert_array_equal(result.compute(), self.func_result) # check correct data was accessed cube.lazy_data.assert_called_once() cube_data.assert_not_called() @@ -76,17 +73,17 @@ def test_dask_array_input(self): result = map_complete_blocks( lazy_array, self.func, dims=(1,), out_sizes=(4,), dtype=lazy_array.dtype ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), self.func_result) + assert is_lazy_data(result) + assert_array_equal(result.compute(), self.func_result) def test_dask_masked_array_input(self): array = da.ma.masked_array(np.arange(2), mask=np.arange(2)) result = map_complete_blocks( array, self.func, dims=tuple(), out_sizes=tuple(), dtype=array.dtype ) - self.assertTrue(is_lazy_data(result)) - self.assertTrue(isinstance(da.utils.meta_from_array(result), np.ma.MaskedArray)) - self.assertArrayEqual(result.compute(), np.ma.masked_array([1, 2], mask=[0, 1])) + assert is_lazy_data(result) + assert isinstance(da.utils.meta_from_array(result), np.ma.MaskedArray) + assert_array_equal(result.compute(), np.ma.masked_array([1, 2], mask=[0, 1])) def test_dask_array_input_with_different_output_dtype(self): lazy_array = da.ma.masked_array(self.array, chunks=((1, 1), (4,))) @@ -100,10 +97,10 @@ def func(chunk): result = map_complete_blocks( lazy_array, func, dims=(1,), out_sizes=(4,), dtype=dtype ) - self.assertTrue(isinstance(da.utils.meta_from_array(result), np.ma.MaskedArray)) - self.assertTrue(result.dtype == dtype) - self.assertTrue(result.compute().dtype == dtype) - self.assertArrayEqual(result.compute(), self.func_result) + assert isinstance(da.utils.meta_from_array(result), np.ma.MaskedArray) + assert result.dtype == dtype + assert result.compute().dtype == dtype + assert_array_equal(result.compute(), self.func_result) def test_rechunk(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (2, 2))) @@ -111,8 +108,8 @@ def test_rechunk(self): result = map_complete_blocks( cube, self.func, dims=(1,), out_sizes=(4,), dtype=lazy_array.dtype ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), self.func_result) + assert is_lazy_data(result) + assert_array_equal(result.compute(), self.func_result) def test_different_out_shape(self): lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) @@ -125,8 +122,8 @@ def func(_): result = map_complete_blocks( cube, func, dims=(1,), out_sizes=(2,), dtype=lazy_array.dtype ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), func_result) + assert is_lazy_data(result) + assert_array_equal(result.compute(), func_result) def test_multidimensional_input(self): 
array = np.arange(2 * 3 * 4).reshape(2, 3, 4) @@ -135,9 +132,5 @@ def test_multidimensional_input(self): result = map_complete_blocks( cube, self.func, dims=(1, 2), out_sizes=(3, 4), dtype=lazy_array.dtype ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), array + 1) - - -if __name__ == "__main__": - tests.main() + assert is_lazy_data(result) + assert_array_equal(result.compute(), array + 1) diff --git a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py index 993cb01178..93e04659fa 100644 --- a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py +++ b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py @@ -4,17 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.multidim_lazy_stack`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import dask.array as da import numpy as np from iris._lazy_data import as_concrete_data, as_lazy_data, multidim_lazy_stack +from iris.tests._shared_utils import assert_array_all_close -class Test_multidim_lazy_stack(tests.IrisTest): +class Test_multidim_lazy_stack: def _check(self, stack_shape): vals = np.arange(np.prod(stack_shape)).reshape(stack_shape) stack = np.empty(stack_shape, "object") @@ -26,10 +23,10 @@ def _check(self, stack_shape): expected[index] = val result = multidim_lazy_stack(stack) - self.assertEqual(result.shape, stack_shape + stack_element_shape) - self.assertIsInstance(result, da.core.Array) + assert result.shape == stack_shape + stack_element_shape + assert isinstance(result, da.core.Array) result = as_concrete_data(result) - self.assertArrayAllClose(result, expected) + assert_array_all_close(result, expected) def test_0d_lazy_stack(self): shape = () @@ -42,7 +39,3 @@ def test_1d_lazy_stack(self): def test_2d_lazy_stack(self): shape = (3, 2) self._check(shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_non_lazy.py b/lib/iris/tests/unit/lazy_data/test_non_lazy.py index 3c6bb99e0a..9fa496d56f 100644 --- a/lib/iris/tests/unit/lazy_data/test_non_lazy.py +++ b/lib/iris/tests/unit/lazy_data/test_non_lazy.py @@ -4,17 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris._lazy data.non_lazy`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np from iris._lazy_data import as_lazy_data, is_lazy_data, non_lazy +from iris.tests._shared_utils import assert_array_equal -class Test_non_lazy(tests.IrisTest): - def setUp(self): +class Test_non_lazy: + def setup_method(self): self.array = np.arange(8).reshape(2, 4) self.lazy_array = as_lazy_data(self.array) self.func = non_lazy(lambda array: array.sum(axis=0)) @@ -22,15 +19,11 @@ def setUp(self): def test_lazy_input(self): result = self.func(self.lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, self.func_result) + assert not is_lazy_data(result) + assert_array_equal(result, self.func_result) def test_non_lazy_input(self): # Check that a non-lazy input doesn't trip up the functionality. 
result = self.func(self.array) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, self.func_result) - - -if __name__ == "__main__": - tests.main() + assert not is_lazy_data(result) + assert_array_equal(result, self.func_result) diff --git a/lib/iris/tests/unit/plot/__init__.py b/lib/iris/tests/unit/plot/__init__.py index c262d014f3..3438d6884a 100644 --- a/lib/iris/tests/unit/plot/__init__.py +++ b/lib/iris/tests/unit/plot/__init__.py @@ -4,19 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.plot` module.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest from iris.coords import AuxCoord from iris.plot import _broadcast_2d as broadcast +from iris.tests import _shared_utils from iris.tests.stock import lat_lon_cube, simple_2d -@tests.skip_plot -class TestGraphicStringCoord(tests.GraphicsTest): - def setUp(self): - super().setUp() +@_shared_utils.skip_plot +class TestGraphicStringCoord(_shared_utils.GraphicsTest): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = simple_2d(with_bounds=True) self.cube.add_aux_coord(AuxCoord(list("abcd"), long_name="str_coord"), 1) self.lat_lon_cube = lat_lon_cube() @@ -38,7 +37,7 @@ def tick_loc_and_label(self, axis_name, axes=None): labels = [tick.get_text() for tick in axis.get_ticklabels()] return list(zip(locations, labels)) - def assertBoundsTickLabels(self, axis, axes=None): + def assert_bounds_tick_labels(self, axis, axes=None): actual = self.tick_loc_and_label(axis, axes) expected = [ (-1.0, ""), @@ -48,15 +47,15 @@ def assertBoundsTickLabels(self, axis, axes=None): (3.0, "d"), (4.0, ""), ] - self.assertEqual(expected, actual) + assert expected == actual - def assertPointsTickLabels(self, axis, axes=None): + def assert_points_tick_labels(self, axis, axes=None): actual = self.tick_loc_and_label(axis, axes) expected = [(0.0, "a"), (1.0, "b"), (2.0, "c"), (3.0, "d")] - self.assertEqual(expected, actual) + assert expected == actual -@tests.skip_plot +@_shared_utils.skip_plot class MixinCoords: """Mixin class of common plotting tests providing 2-dimensional permutations of coordinates and anonymous dimensions. @@ -64,14 +63,14 @@ class MixinCoords: """ def _check(self, u, v, data=None): - self.assertEqual(self.mpl_patch.call_count, 1) + assert self.mpl_patch.call_count == 1 if data is not None: (actual_u, actual_v, actual_data), _ = self.mpl_patch.call_args - self.assertArrayEqual(actual_data, data) + _shared_utils.assert_array_equal(actual_data, data) else: (actual_u, actual_v), _ = self.mpl_patch.call_args - self.assertArrayEqual(actual_u, u) - self.assertArrayEqual(actual_v, v) + _shared_utils.assert_array_equal(actual_u, u) + _shared_utils.assert_array_equal(actual_v, v) def test_foo_bar(self): self.draw_func(self.cube, coords=("foo", "bar")) diff --git a/lib/iris/tests/unit/plot/_blockplot_common.py b/lib/iris/tests/unit/plot/_blockplot_common.py index 04a7d8866f..a7abd9dd23 100644 --- a/lib/iris/tests/unit/plot/_blockplot_common.py +++ b/lib/iris/tests/unit/plot/_blockplot_common.py @@ -4,13 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. """Common test code for `iris.plot.pcolor` and `iris.plot.pcolormesh`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords @@ -23,11 +18,11 @@ class MixinStringCoordPlot: # and defines "self.blockplot_func()", to return the `iris.plot` function. def test_yaxis_labels(self): self.blockplot_func()(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): self.blockplot_func()(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -37,7 +32,7 @@ def test_xaxis_labels_with_axes(self): ax.set_xlim(0, 3) self.blockplot_func()(self.cube, coords=("str_coord", "bar"), axes=ax) plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) + self.assert_points_tick_labels("xaxis", ax) def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -47,23 +42,23 @@ def test_yaxis_labels_with_axes(self): ax.set_ylim(0, 3) self.blockplot_func()(self.cube, axes=ax, coords=("bar", "str_coord")) plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) + self.assert_points_tick_labels("yaxis", ax) def test_geoaxes_exception(self): import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) - self.assertRaises(TypeError, self.blockplot_func(), self.lat_lon_cube, axes=ax) + pytest.raises(TypeError, self.blockplot_func(), self.lat_lon_cube, axes=ax) plt.close(fig) class Mixin2dCoordsPlot(MixinCoords): # Mixin for common coordinate tests on pcolor/pcolormesh. # To use, make a class that inherits from this *and* - # :class:`iris.tests.IrisTest`, - # and defines "self.blockplot_func()", to return the `iris.plot` function. - def blockplot_setup(self): + # defines "self.blockplot_func()", to return the `iris.plot` function. + @pytest.fixture(autouse=True) + def _blockplot_setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=True) coord = self.cube.coord("foo") @@ -76,22 +71,21 @@ def blockplot_setup(self): self.dataT = self.data.T self.draw_func = self.blockplot_func() patch_target_name = "matplotlib.pyplot." + self.draw_func.__name__ - self.mpl_patch = self.patch(patch_target_name) + self.mpl_patch = mocker.patch(patch_target_name) class Mixin2dCoordsContigTol: # Mixin for contiguity tolerance argument to pcolor/pcolormesh. # To use, make a class that inherits from this *and* - # :class:`iris.tests.IrisTest`, - # and defines "self.blockplot_func()", to return the `iris.plot` function, + # defines "self.blockplot_func()", to return the `iris.plot` function, # and defines "self.additional_kwargs" for expected extra call args. - def test_contig_tol(self): + def test_contig_tol(self, mocker): # Patch the inner call to ensure contiguity_tolerance is passed. 
- cube_argument = mock.sentinel.passed_arg - expected_result = mock.sentinel.returned_value - blockplot_patch = self.patch( + cube_argument = mocker.sentinel.passed_arg + expected_result = mocker.sentinel.returned_value + blockplot_patch = mocker.patch( "iris.plot._draw_2d_from_bounds", - mock.Mock(return_value=expected_result), + mocker.Mock(return_value=expected_result), ) # Make the call draw_func = self.blockplot_func() @@ -99,19 +93,12 @@ def test_contig_tol(self): result = draw_func(cube_argument, contiguity_tolerance=0.0123) drawfunc_name = draw_func.__name__ # Check details of the call that was made. - self.assertEqual( - blockplot_patch.call_args_list, - [ - mock.call( - drawfunc_name, - cube_argument, - contiguity_tolerance=0.0123, - **other_kwargs, - ) - ], - ) - self.assertEqual(result, expected_result) - - -if __name__ == "__main__": - tests.main() + assert blockplot_patch.call_args_list == [ + mocker.call( + drawfunc_name, + cube_argument, + contiguity_tolerance=0.0123, + **other_kwargs, + ) + ] + assert result == expected_result diff --git a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py index 9ec80cbd50..3b23945118 100644 --- a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py +++ b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py @@ -6,22 +6,18 @@ function. """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np import numpy.ma as ma +import pytest from iris.coords import DimCoord from iris.plot import _check_bounds_contiguity_and_mask +from iris.tests import _shared_utils from iris.tests.stock import make_bounds_discontiguous_at_point, sample_2d_latlons -@tests.skip_plot -class Test_check_bounds_contiguity_and_mask(tests.IrisTest): +@_shared_utils.skip_plot +class Test_check_bounds_contiguity_and_mask: def test_1d_not_checked(self): # Test a 1D coordinate, which is not checked as atol is not set. coord = DimCoord([1, 3, 5], bounds=[[0, 2], [2, 4], [5, 6]]) @@ -51,7 +47,7 @@ def test_1d_discontigous_unmasked(self): "coordinate are not contiguous and data is not masked where " "the discontiguity occurs" ) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): _check_bounds_contiguity_and_mask(coord, data, atol=1e-3) def test_2d_contiguous(self): @@ -60,18 +56,14 @@ def test_2d_contiguous(self): cube = sample_2d_latlons() _check_bounds_contiguity_and_mask(cube.coord("longitude"), cube.data) - def test_2d_contiguous_atol(self): + def test_2d_contiguous_atol(self, mocker): # Check the atol is passed correctly. cube = sample_2d_latlons() - with mock.patch( - "iris.coords.Coord._discontiguity_in_bounds" - ) as discontiguity_check: - # Discontiguity returns two objects that are unpacked in - # `_check_bounds_contiguity_and_mask`. - discontiguity_check.return_value = [True, None] - _check_bounds_contiguity_and_mask( - cube.coord("longitude"), cube.data, atol=1e-3 - ) + discontiguity_check = mocker.patch("iris.coords.Coord._discontiguity_in_bounds") + # Discontiguity returns two objects that are unpacked in + # `_check_bounds_contiguity_and_mask`. 
+ discontiguity_check.return_value = [True, None] + _check_bounds_contiguity_and_mask(cube.coord("longitude"), cube.data, atol=1e-3) discontiguity_check.assert_called_with(atol=1e-3) def test_2d_discontigous_masked(self): @@ -88,9 +80,5 @@ def test_2d_discontigous_unmasked(self): make_bounds_discontiguous_at_point(cube, 3, 4) msg = "coordinate are not contiguous" cube.data[3, 4] = ma.nomask - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): _check_bounds_contiguity_and_mask(cube.coord("longitude"), cube.data) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py index bf724c443c..c881d550df 100644 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py @@ -6,20 +6,19 @@ function. """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest.mock import Mock - from cartopy.crs import Geostationary, NearsidePerspective import numpy as np +import pytest from iris.plot import _check_geostationary_coords_and_convert +from iris.tests import _shared_utils + +class Test__check_geostationary_coords_and_convert: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.mocker = mocker -class Test__check_geostationary_coords_and_convert(tests.IrisTest): - def setUp(self): geostationary_altitude = 35785831.0 # proj4_params is the one attribute of the Geostationary class that # is needed for the function. @@ -46,7 +45,7 @@ def _test(self, geostationary=True): projection_spec = NearsidePerspective target_tuple = (self.x_original, self.y_original) - projection = Mock(spec=projection_spec) + projection = self.mocker.Mock(spec=projection_spec) projection.proj4_params = self.proj4_params # Projection is looked for within a dictionary called kwargs. kwargs = {"transform": projection} @@ -54,7 +53,7 @@ def _test(self, geostationary=True): x, y = _check_geostationary_coords_and_convert( self.x_original, self.y_original, kwargs ) - self.assertArrayEqual((x, y), target_tuple) + _shared_utils.assert_array_equal((x, y), target_tuple) def test_geostationary_present(self): self._test(geostationary=True) diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py index d155f30969..4f5b88faab 100644 --- a/lib/iris/tests/unit/plot/test__fixup_dates.py +++ b/lib/iris/tests/unit/plot/test__fixup_dates.py @@ -4,10 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._fixup_dates` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import datetime from cf_units import Unit @@ -15,34 +11,35 @@ from iris.coords import AuxCoord from iris.plot import _fixup_dates +from iris.tests import _shared_utils -class Test(tests.IrisTest): +class Test: def test_standard_calendar(self): unit = Unit("hours since 2000-04-13 00:00:00", calendar="standard") coord = AuxCoord([1, 3, 6], "time", units=unit) result = _fixup_dates(coord, coord.points) - self.assertIsInstance(result[0], datetime.datetime) + assert isinstance(result[0], datetime.datetime) expected = [ datetime.datetime(2000, 4, 13, 1), datetime.datetime(2000, 4, 13, 3), datetime.datetime(2000, 4, 13, 6), ] - self.assertArrayEqual(result, expected) + _shared_utils.assert_array_equal(result, expected) def test_standard_calendar_sub_second(self): unit = Unit("seconds since 2000-04-13 00:00:00", calendar="standard") coord = AuxCoord([1, 1.25, 1.5], "time", units=unit) result = _fixup_dates(coord, coord.points) - self.assertIsInstance(result[0], datetime.datetime) + assert isinstance(result[0], datetime.datetime) expected = [ datetime.datetime(2000, 4, 13, 0, 0, 1), datetime.datetime(2000, 4, 13, 0, 0, 1), datetime.datetime(2000, 4, 13, 0, 0, 2), ] - self.assertArrayEqual(result, expected) + _shared_utils.assert_array_equal(result, expected) - @tests.skip_nc_time_axis + @_shared_utils.skip_nc_time_axis def test_360_day_calendar(self): calendar = "360_day" unit = Unit("days since 2000-02-25 00:00:00", calendar=calendar) @@ -53,9 +50,9 @@ def test_360_day_calendar(self): cftime.datetime(2000, 2, 29, calendar=calendar), cftime.datetime(2000, 2, 30, calendar=calendar), ] - self.assertArrayEqual(result, expected_datetimes) + _shared_utils.assert_array_equal(result, expected_datetimes) - @tests.skip_nc_time_axis + @_shared_utils.skip_nc_time_axis def test_365_day_calendar(self): calendar = "365_day" unit = Unit("minutes since 2000-02-25 00:00:00", calendar=calendar) @@ -66,16 +63,12 @@ def test_365_day_calendar(self): cftime.datetime(2000, 2, 25, 1, 0, calendar=calendar), cftime.datetime(2000, 2, 25, 2, 30, calendar=calendar), ] - self.assertArrayEqual(result, expected_datetimes) + _shared_utils.assert_array_equal(result, expected_datetimes) - @tests.skip_nc_time_axis + @_shared_utils.skip_nc_time_axis def test_360_day_calendar_attribute(self): calendar = "360_day" unit = Unit("days since 2000-02-01 00:00:00", calendar=calendar) coord = AuxCoord([0, 3, 6], "time", units=unit) result = _fixup_dates(coord, coord.points) - self.assertEqual(result[0].calendar, calendar) - - -if __name__ == "__main__": - tests.main() + assert result[0].calendar == calendar diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn.py b/lib/iris/tests/unit/plot/test__get_plot_defn.py index 4032c8792d..81d54f9716 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn.py @@ -4,35 +4,28 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._get_plot_defn` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import iris.coords +from iris.tests import _shared_utils from iris.tests.stock import simple_2d, simple_2d_w_multidim_coords -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot -class Test_get_plot_defn(tests.IrisTest): +@_shared_utils.skip_plot +class Test_get_plot_defn: def test_axis_order_xy(self): cube_xy = simple_2d() defn = iplt._get_plot_defn(cube_xy, iris.coords.POINT_MODE) - self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) + assert [coord.name() for coord in defn.coords] == ["bar", "foo"] def test_axis_order_yx(self): cube_yx = simple_2d() cube_yx.transpose() defn = iplt._get_plot_defn(cube_yx, iris.coords.POINT_MODE) - self.assertEqual([coord.name() for coord in defn.coords], ["foo", "bar"]) + assert [coord.name() for coord in defn.coords] == ["foo", "bar"] def test_2d_coords(self): cube = simple_2d_w_multidim_coords() defn = iplt._get_plot_defn(cube, iris.coords.BOUND_MODE) - self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) - - -if __name__ == "__main__": - tests.main() + assert [coord.name() for coord in defn.coords] == ["bar", "foo"] diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py index 7b39043559..defae2ca86 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py +++ b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py @@ -6,69 +6,64 @@ function. """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest from iris.coords import BOUND_MODE, POINT_MODE +from iris.tests import _shared_utils from iris.tests.stock import hybrid_height, simple_2d, simple_2d_w_multidim_coords -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot -class Test_get_plot_defn_custom_coords_picked(tests.IrisTest): +@_shared_utils.skip_plot +class Test_get_plot_defn_custom_coords_picked: def test_1d_coords(self): cube = simple_2d() defn = iplt._get_plot_defn_custom_coords_picked( cube, ("foo", "bar"), POINT_MODE ) - self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) - self.assertFalse(defn.transpose) + assert [coord.name() for coord in defn.coords] == ["bar", "foo"] + assert not defn.transpose def test_1d_coords_swapped(self): cube = simple_2d() defn = iplt._get_plot_defn_custom_coords_picked( cube, ("bar", "foo"), POINT_MODE ) - self.assertEqual([coord.name() for coord in defn.coords], ["foo", "bar"]) - self.assertTrue(defn.transpose) + assert [coord.name() for coord in defn.coords] == ["foo", "bar"] + assert defn.transpose def test_1d_coords_as_integers(self): cube = simple_2d() defn = iplt._get_plot_defn_custom_coords_picked(cube, (1, 0), POINT_MODE) - self.assertEqual([coord for coord in defn.coords], [0, 1]) - self.assertFalse(defn.transpose) + assert [coord for coord in defn.coords] == [0, 1] + assert not defn.transpose def test_1d_coords_as_integers_swapped(self): cube = simple_2d() defn = iplt._get_plot_defn_custom_coords_picked(cube, (0, 1), POINT_MODE) - self.assertEqual([coord for coord in defn.coords], [1, 0]) - self.assertTrue(defn.transpose) + assert [coord for coord in defn.coords] == [1, 0] + assert defn.transpose def test_2d_coords(self): cube = simple_2d_w_multidim_coords() defn = iplt._get_plot_defn_custom_coords_picked( cube, 
("foo", "bar"), BOUND_MODE ) - self.assertEqual([coord.name() for coord in defn.coords], ["bar", "foo"]) - self.assertFalse(defn.transpose) + assert [coord.name() for coord in defn.coords] == ["bar", "foo"] + assert not defn.transpose def test_2d_coords_as_integers(self): cube = simple_2d_w_multidim_coords() defn = iplt._get_plot_defn_custom_coords_picked(cube, (0, 1), BOUND_MODE) - self.assertEqual([coord for coord in defn.coords], [1, 0]) - self.assertTrue(defn.transpose) + assert [coord for coord in defn.coords] == [1, 0] + assert defn.transpose def test_span_check(self): cube = hybrid_height() emsg = "don't span the 2 data dimensions" - with self.assertRaisesRegex(ValueError, emsg): + with pytest.raises(ValueError, match=emsg): iplt._get_plot_defn_custom_coords_picked( cube, ("sigma", "level_height"), POINT_MODE ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__get_plot_objects.py b/lib/iris/tests/unit/plot/test__get_plot_objects.py index fbccbe94fb..a0ae48141a 100644 --- a/lib/iris/tests/unit/plot/test__get_plot_objects.py +++ b/lib/iris/tests/unit/plot/test__get_plot_objects.py @@ -4,41 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot._get_plot_objects` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest import iris.cube +from iris.tests import _shared_utils -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: from iris.plot import _get_plot_objects -@tests.skip_plot -class Test__get_plot_objects(tests.IrisTest): +@_shared_utils.skip_plot +class Test__get_plot_objects: def test_scalar(self): cube1 = iris.cube.Cube(1) cube2 = iris.cube.Cube(1) expected = (cube1, cube2, 1, 1, ()) result = _get_plot_objects((cube1, cube2)) - self.assertTupleEqual(expected, result) + assert result == expected def test_mismatched_size_first_scalar(self): cube1 = iris.cube.Cube(1) cube2 = iris.cube.Cube([1, 42]) - with self.assertRaisesRegex( - ValueError, "x and y-axis objects are not compatible" - ): + with pytest.raises(ValueError, match="x and y-axis objects are not compatible"): _get_plot_objects((cube1, cube2)) def test_mismatched_size_second_scalar(self): cube1 = iris.cube.Cube(1) cube2 = iris.cube.Cube([1, 42]) - with self.assertRaisesRegex( - ValueError, "x and y-axis objects are not compatible" - ): + with pytest.raises(ValueError, match="x and y-axis objects are not compatible"): _get_plot_objects((cube2, cube1)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py index cac42bb765..e8d4b6d1cd 100644 --- a/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py +++ b/lib/iris/tests/unit/plot/test__replace_axes_with_cartopy_axes.py @@ -4,20 +4,21 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.__replace_axes_with_cartopy_axes` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import cartopy.crs as ccrs import matplotlib.pyplot as plt +import pytest from iris.plot import _replace_axes_with_cartopy_axes +from iris.tests import _shared_utils -@tests.skip_plot -class Test_replace_axes_with_cartopy_axes(tests.IrisTest): - def setUp(self): +@_shared_utils.skip_plot +class Test_replace_axes_with_cartopy_axes: + @pytest.fixture(autouse=True) + def _setup(self): self.fig = plt.figure() + yield + plt.close(self.fig) def test_preserve_position(self): position = [0.17, 0.65, 0.2, 0.2] @@ -32,18 +33,11 @@ def test_preserve_position(self): # get_position returns mpl.transforms.Bbox object, for which equality does # not appear to be implemented. Compare the bounds (tuple) instead. - self.assertEqual(expected.get_position().bounds, result.get_position().bounds) + assert expected.get_position().bounds == result.get_position().bounds def test_ax_on_subfigure(self): subfig, _ = self.fig.subfigures(nrows=2) subfig.subplots() _replace_axes_with_cartopy_axes(ccrs.PlateCarree()) result = plt.gca() - self.assertIs(result.get_figure(), subfig) - - def tearDown(self): - plt.close(self.fig) - - -if __name__ == "__main__": - tests.main() + assert result.get_figure() is subfig diff --git a/lib/iris/tests/unit/plot/test_contour.py b/lib/iris/tests/unit/plot/test_contour.py index 43c0564ff4..e874c756ce 100644 --- a/lib/iris/tests/unit/plot/test_contour.py +++ b/lib/iris/tests/unit/plot/test_contour.py @@ -4,28 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.contour` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): iplt.contour(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") + self.assert_points_tick_labels("yaxis") def test_xaxis_labels(self): iplt.contour(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") + self.assert_points_tick_labels("xaxis") def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -34,7 +32,7 @@ def test_yaxis_labels_with_axes(self): ax = fig.add_subplot(111) iplt.contour(self.cube, axes=ax, coords=("bar", "str_coord")) plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) + self.assert_points_tick_labels("yaxis", ax) def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -43,20 +41,21 @@ def test_xaxis_labels_with_axes(self): ax = fig.add_subplot(111) iplt.contour(self.cube, axes=ax, coords=("str_coord", "bar")) plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) + self.assert_points_tick_labels("xaxis", ax) def test_geoaxes_exception(self): import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.contour, self.lat_lon_cube, axes=ax) + pytest.raises(TypeError, iplt.contour, self.lat_lon_cube, axes=ax) plt.close(fig) -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # 
We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=False) self.foo = self.cube.coord("foo").points @@ -65,9 +64,5 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.contour") + self.mpl_patch = mocker.patch("matplotlib.pyplot.contour") self.draw_func = iplt.contour - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_contourf.py b/lib/iris/tests/unit/plot/test_contourf.py index 59fe631b67..6ee170f4c1 100644 --- a/lib/iris/tests/unit/plot/test_contourf.py +++ b/lib/iris/tests/unit/plot/test_contourf.py @@ -4,31 +4,27 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.contourf` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import matplotlib.pyplot as plt import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): iplt.contourf(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") + self.assert_points_tick_labels("yaxis") def test_xaxis_labels(self): iplt.contourf(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") + self.assert_points_tick_labels("xaxis") def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -37,7 +33,7 @@ def test_yaxis_labels_with_axes(self): ax = fig.add_subplot(111) iplt.contourf(self.cube, axes=ax, coords=("bar", "str_coord")) plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) + self.assert_points_tick_labels("yaxis", ax) def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -46,20 +42,21 @@ def test_xaxis_labels_with_axes(self): ax = fig.add_subplot(111) iplt.contourf(self.cube, axes=ax, coords=("str_coord", "bar")) plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) + self.assert_points_tick_labels("xaxis", ax) def test_geoaxes_exception(self): import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.contourf, self.lat_lon_cube, axes=ax) + pytest.raises(TypeError, iplt.contourf, self.lat_lon_cube, axes=ax) plt.close(fig) -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=False) self.foo = self.cube.coord("foo").points @@ -68,29 +65,31 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = self.cube.data self.dataT = self.data.T - mocker = mock.Mock(wraps=plt.contourf) - self.mpl_patch = self.patch("matplotlib.pyplot.contourf", mocker) + self.mpl_patch = mocker.patch("matplotlib.pyplot.contourf") self.draw_func = iplt.contourf -@tests.skip_plot -class TestAntialias(tests.IrisTest): - def setUp(self): +@_shared_utils.skip_plot +class TestAntialias: + @pytest.fixture(autouse=True) + def _setup(self): self.fig = plt.figure() + yield + plt.close(self.fig) - def 
test_skip_contour(self): + def test_skip_contour(self, mocker): # Contours should not be added if data is all below second level. See #4086. cube = simple_2d() levels = [5, 15, 20, 200] colors = ["b", "r", "y"] - with mock.patch("matplotlib.pyplot.contour") as mocked_contour: - iplt.contourf(cube, levels=levels, colors=colors, antialiased=True) + mocked_contour = mocker.patch("matplotlib.pyplot.contour") + iplt.contourf(cube, levels=levels, colors=colors, antialiased=True) mocked_contour.assert_not_called() - def test_apply_contour_nans(self): + def test_apply_contour_nans(self, mocker): # Presence of nans should not prevent contours being added. cube = simple_2d() cube.data = cube.data.astype(np.float64) @@ -99,14 +98,7 @@ def test_apply_contour_nans(self): levels = [2, 4, 6, 8] colors = ["b", "r", "y"] - with mock.patch("matplotlib.pyplot.contour") as mocked_contour: - iplt.contourf(cube, levels=levels, colors=colors, antialiased=True) + mocked_contour = mocker.patch("matplotlib.pyplot.contour") + iplt.contourf(cube, levels=levels, colors=colors, antialiased=True) mocked_contour.assert_called_once() - - def tearDown(self): - plt.close(self.fig) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_hist.py b/lib/iris/tests/unit/plot/test_hist.py index 9c1740587c..c4651e846e 100644 --- a/lib/iris/tests/unit/plot/test_hist.py +++ b/lib/iris/tests/unit/plot/test_hist.py @@ -4,47 +4,42 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.hist` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np import pytest from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube +from iris.tests import _shared_utils -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class Test: @pytest.fixture(autouse=True) - def create_data(self): + def _create_data(self): self.data = np.array([0, 100, 110, 120, 200, 320]) @pytest.mark.parametrize( "x", [AuxCoord, Cube, DimCoord, CellMeasure, AncillaryVariable] ) - def test_simple(self, x): - with mock.patch("matplotlib.pyplot.hist") as mocker: - iplt.hist(x(self.data)) + def test_simple(self, x, mocker): + mock_patch = mocker.patch("matplotlib.pyplot.hist") + iplt.hist(x(self.data)) # mocker.assert_called_once_with is not working as expected with - # _DimensionalMetadata objects so we use np.testing array equality + # _DimensionalMetadata objects so we use array equality # checks instead. 
- args, kwargs = mocker.call_args + args, kwargs = mock_patch.call_args assert len(args) == 1 - np.testing.assert_array_equal(args[0], self.data) + _shared_utils.assert_array_equal(args[0], self.data) - def test_kwargs(self): + def test_kwargs(self, mocker): cube = Cube(self.data) bins = [0, 150, 250, 350] - with mock.patch("matplotlib.pyplot.hist") as mocker: - iplt.hist(cube, bins=bins) - mocker.assert_called_once_with(self.data, bins=bins) + mock_patch = mocker.patch("matplotlib.pyplot.hist") + iplt.hist(cube, bins=bins) + mock_patch.assert_called_once_with(self.data, bins=bins) def test_unsupported_input(self): with pytest.raises(TypeError, match="x must be a"): diff --git a/lib/iris/tests/unit/plot/test_outline.py b/lib/iris/tests/unit/plot/test_outline.py index dc1b27487b..2c84a6c718 100644 --- a/lib/iris/tests/unit/plot/test_outline.py +++ b/lib/iris/tests/unit/plot/test_outline.py @@ -4,28 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.outline` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): iplt.outline(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): iplt.outline(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -35,7 +33,7 @@ def test_xaxis_labels_with_axes(self): ax.set_xlim(0, 3) iplt.outline(self.cube, coords=("str_coord", "bar"), axes=ax) plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) + self.assert_points_tick_labels("xaxis", ax) def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -45,20 +43,21 @@ def test_yaxis_labels_with_axes(self): ax.set_ylim(0, 3) iplt.outline(self.cube, axes=ax, coords=("bar", "str_coord")) plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) + self.assert_points_tick_labels("yaxis", ax) def test_geoaxes_exception(self): import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.outline, self.lat_lon_cube, axes=ax) + pytest.raises(TypeError, iplt.outline, self.lat_lon_cube, axes=ax) plt.close(fig) -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=True) coord = self.cube.coord("foo") @@ -69,9 +68,5 @@ def setUp(self): self.bar_index = np.arange(coord.points.size + 1) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.pcolormesh") + self.mpl_patch = mocker.patch("matplotlib.pyplot.pcolormesh") self.draw_func = iplt.outline - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_pcolor.py b/lib/iris/tests/unit/plot/test_pcolor.py index 
219df4d446..27f6a7b6ed 100644 --- a/lib/iris/tests/unit/plot/test_pcolor.py +++ b/lib/iris/tests/unit/plot/test_pcolor.py @@ -4,10 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.pcolor` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - +from iris.tests import _shared_utils from iris.tests.unit.plot import TestGraphicStringCoord from iris.tests.unit.plot._blockplot_common import ( Mixin2dCoordsContigTol, @@ -15,35 +12,28 @@ MixinStringCoordPlot, ) -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt PLOT_FUNCTION_TO_TEST = iplt.pcolor -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(MixinStringCoordPlot, TestGraphicStringCoord): def blockplot_func(self): return PLOT_FUNCTION_TO_TEST -@tests.skip_plot -class Test2dCoords(tests.IrisTest, Mixin2dCoordsPlot): - def setUp(self): - self.blockplot_setup() - +@_shared_utils.skip_plot +class Test2dCoords(Mixin2dCoordsPlot): def blockplot_func(self): return PLOT_FUNCTION_TO_TEST -@tests.skip_plot -class Test2dContigTol(tests.IrisTest, Mixin2dCoordsContigTol): +@_shared_utils.skip_plot +class Test2dContigTol(Mixin2dCoordsContigTol): # Extra call kwargs expected. additional_kwargs = dict(antialiased=True, snap=False) def blockplot_func(self): return PLOT_FUNCTION_TO_TEST - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_pcolormesh.py b/lib/iris/tests/unit/plot/test_pcolormesh.py index dba3cce5c0..9a9fa19ae0 100644 --- a/lib/iris/tests/unit/plot/test_pcolormesh.py +++ b/lib/iris/tests/unit/plot/test_pcolormesh.py @@ -8,8 +8,7 @@ # importing anything else. from typing import Any -import iris.tests as tests # isort:skip - +from iris.tests import _shared_utils from iris.tests.unit.plot import TestGraphicStringCoord from iris.tests.unit.plot._blockplot_common import ( Mixin2dCoordsContigTol, @@ -17,35 +16,28 @@ MixinStringCoordPlot, ) -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt PLOT_FUNCTION_TO_TEST = iplt.pcolormesh -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(MixinStringCoordPlot, TestGraphicStringCoord): def blockplot_func(self): return PLOT_FUNCTION_TO_TEST -@tests.skip_plot -class Test2dCoords(tests.IrisTest, Mixin2dCoordsPlot): - def setUp(self): - self.blockplot_setup() - +@_shared_utils.skip_plot +class Test2dCoords(Mixin2dCoordsPlot): def blockplot_func(self): return PLOT_FUNCTION_TO_TEST -@tests.skip_plot -class Test2dContigTol(tests.IrisTest, Mixin2dCoordsContigTol): +@_shared_utils.skip_plot +class Test2dContigTol(Mixin2dCoordsContigTol): # Extra call kwargs expected -- unlike 'pcolor', there are none. additional_kwargs: dict[str, Any] = {} def blockplot_func(self): return PLOT_FUNCTION_TO_TEST - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py index 6adf1c4cf5..76225557f0 100644 --- a/lib/iris/tests/unit/plot/test_plot.py +++ b/lib/iris/tests/unit/plot/test_plot.py @@ -4,17 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.plot` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest import iris.coord_systems as ics import iris.coords as coords +from iris.tests import _shared_utils from iris.tests.unit.plot import TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import cartopy.crs as ccrs import cartopy.mpl.geoaxes from matplotlib.path import Path @@ -23,20 +21,22 @@ import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() + parent_setup = TestGraphicStringCoord._setup + + @pytest.fixture(autouse=True) + def _setup(self, parent_setup): self.cube = self.cube[0, :] self.lat_lon_cube = self.lat_lon_cube[0, :] def test_yaxis_labels(self): iplt.plot(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): iplt.plot(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -45,7 +45,7 @@ def test_yaxis_labels_with_axes(self): ax = fig.add_subplot(111) iplt.plot(self.cube, self.cube.coord("str_coord"), axes=ax) plt.close(fig) - self.assertBoundsTickLabels("yaxis", ax) + self.assert_bounds_tick_labels("yaxis", ax) def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -54,7 +54,7 @@ def test_xaxis_labels_with_axes(self): ax = fig.add_subplot(111) iplt.plot(self.cube.coord("str_coord"), self.cube, axes=ax) plt.close(fig) - self.assertBoundsTickLabels("xaxis", ax) + self.assert_bounds_tick_labels("xaxis", ax) def test_plot_longitude(self): import matplotlib.pyplot as plt @@ -65,14 +65,15 @@ def test_plot_longitude(self): plt.close(fig) -@tests.skip_plot -class TestTrajectoryWrap(tests.IrisTest): +@_shared_utils.skip_plot +class TestTrajectoryWrap: """Test that a line plot of geographic coordinates wraps around the end of the coordinates rather than plotting across the map. """ - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): plt.figure() self.geog_cs = ics.GeogCS(6371229.0) self.plate_carree = self.geog_cs.as_cartopy_projection() @@ -85,7 +86,7 @@ def lon_lat_coords(self, lons, lats, cs=None): coords.AuxCoord(lats, "latitude", units="degrees", coord_system=cs), ) - def assertPathsEqual(self, expected, actual): + def assert_paths_equal(self, expected, actual): """Assert that the given paths are equal once STOP vertices have been removed. @@ -95,17 +96,15 @@ def assertPathsEqual(self, expected, actual): # Remove Path.STOP vertices everts = expected.vertices[np.where(expected.codes != Path.STOP)] averts = actual.vertices[np.where(actual.codes != Path.STOP)] - self.assertArrayAlmostEqual(everts, averts) - self.assertArrayEqual(expected.codes, actual.codes) + _shared_utils.assert_array_almost_equal(everts, averts) + _shared_utils.assert_array_equal(expected.codes, actual.codes) def check_paths(self, expected_path, expected_path_crs, lines, axes): """Check that the paths in `lines` match the given expected paths when plotted on the given geoaxes. 
""" - self.assertEqual( - 1, len(lines), "Expected a single line, got {}".format(len(lines)) - ) + assert 1 == len(lines), "Expected a single line, got {}".format(len(lines)) (line,) = lines inter_proj_transform = cartopy.mpl.geoaxes.InterProjectionTransform( expected_path_crs, axes.projection @@ -115,7 +114,7 @@ def check_paths(self, expected_path, expected_path_crs, lines, axes): expected = ax_transform.transform_path(expected_path) actual = line.get_transform().transform_path(line.get_path()) - self.assertPathsEqual(expected, actual) + self.assert_paths_equal(expected, actual) def test_simple(self): lon, lat = self.lon_lat_coords([359, 1], [0, 0]) @@ -255,7 +254,3 @@ def test_rotated(self): grid_north_pole_longitude=120, north_pole_grid_longitude=45, ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_points.py b/lib/iris/tests/unit/plot/test_points.py index 0d713e3d84..e8778ed30e 100644 --- a/lib/iris/tests/unit/plot/test_points.py +++ b/lib/iris/tests/unit/plot/test_points.py @@ -4,28 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.points` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): iplt.points(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): iplt.points(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -35,7 +33,7 @@ def test_xaxis_labels_with_axes(self): ax.set_xlim(0, 3) iplt.points(self.cube, coords=("str_coord", "bar"), axes=ax) plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) + self.assert_points_tick_labels("xaxis", ax) def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -45,20 +43,21 @@ def test_yaxis_labels_with_axes(self): ax.set_ylim(0, 3) iplt.points(self.cube, coords=("bar", "str_coord"), axes=ax) plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) + self.assert_points_tick_labels("yaxis", ax) def test_geoaxes_exception(self): import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.points, self.lat_lon_cube, axes=ax) + pytest.raises(TypeError, iplt.points, self.lat_lon_cube, axes=ax) plt.close(fig) -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=False) self.foo = self.cube.coord("foo").points @@ -67,9 +66,5 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = None self.dataT = None - self.mpl_patch = self.patch("matplotlib.pyplot.scatter") + self.mpl_patch = mocker.patch("matplotlib.pyplot.scatter") self.draw_func = iplt.points - - -if __name__ == "__main__": - tests.main() diff 
--git a/lib/iris/tests/unit/plot/test_scatter.py b/lib/iris/tests/unit/plot/test_scatter.py index 21412010ab..9a0a7db96c 100644 --- a/lib/iris/tests/unit/plot/test_scatter.py +++ b/lib/iris/tests/unit/plot/test_scatter.py @@ -4,29 +4,31 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.plot.scatter` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest + +from iris.tests import _shared_utils from iris.tests.unit.plot import TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.plot as iplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() + parent_setup = TestGraphicStringCoord._setup + + @pytest.fixture(autouse=True) + def _setup(self, parent_setup): self.cube = self.cube[0, :] self.lat_lon_cube = self.lat_lon_cube[0, :] def test_xaxis_labels(self): iplt.scatter(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") def test_yaxis_labels(self): iplt.scatter(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -36,7 +38,7 @@ def test_xaxis_labels_with_axes(self): ax.set_xlim(0, 3) iplt.scatter(self.cube.coord("str_coord"), self.cube, axes=ax) plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) + self.assert_points_tick_labels("xaxis", ax) def test_yaxis_labels_with_axes(self): import matplotlib.pyplot as plt @@ -46,7 +48,7 @@ def test_yaxis_labels_with_axes(self): ax.set_ylim(0, 3) iplt.scatter(self.cube, self.cube.coord("str_coord"), axes=ax) plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) + self.assert_points_tick_labels("yaxis", ax) def test_scatter_longitude(self): import matplotlib.pyplot as plt @@ -55,7 +57,3 @@ def test_scatter_longitude(self): ax = fig.add_subplot(111) iplt.scatter(self.lat_lon_cube, self.lat_lon_cube.coord("longitude"), axes=ax) plt.close(fig) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_contour.py b/lib/iris/tests/unit/quickplot/test_contour.py index 2f3bb1a45d..8d5fe7a01c 100644 --- a/lib/iris/tests/unit/quickplot/test_contour.py +++ b/lib/iris/tests/unit/quickplot/test_contour.py @@ -4,33 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.contour` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): qplt.contour(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") + self.assert_points_tick_labels("yaxis") def test_xaxis_labels(self): qplt.contour(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") + self.assert_points_tick_labels("xaxis") -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=False) self.foo = self.cube.coord("foo").points @@ -39,9 +38,5 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.contour") + self.mpl_patch = mocker.patch("matplotlib.pyplot.contour") self.draw_func = qplt.contour - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_contourf.py b/lib/iris/tests/unit/quickplot/test_contourf.py index 55c9940821..91deb3b79e 100644 --- a/lib/iris/tests/unit/quickplot/test_contourf.py +++ b/lib/iris/tests/unit/quickplot/test_contourf.py @@ -4,36 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.contourf` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import matplotlib.pyplot as plt import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): qplt.contourf(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") + self.assert_points_tick_labels("yaxis") def test_xaxis_labels(self): qplt.contourf(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") + self.assert_points_tick_labels("xaxis") -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=False) self.foo = self.cube.coord("foo").points @@ -42,12 +38,7 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = self.cube.data self.dataT = self.data.T - mocker = mock.Mock(wraps=plt.contourf) - self.mpl_patch = self.patch("matplotlib.pyplot.contourf", mocker) + self.mpl_patch = mocker.patch("matplotlib.pyplot.contourf") # Also need to mock the colorbar. 
- self.patch("matplotlib.pyplot.colorbar") + mocker.patch("matplotlib.pyplot.colorbar") self.draw_func = qplt.contourf - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_outline.py b/lib/iris/tests/unit/quickplot/test_outline.py index 4dd924b749..1d83561f9d 100644 --- a/lib/iris/tests/unit/quickplot/test_outline.py +++ b/lib/iris/tests/unit/quickplot/test_outline.py @@ -4,33 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.outline` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): qplt.outline(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): qplt.outline(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=True) coord = self.cube.coord("foo") @@ -41,9 +40,5 @@ def setUp(self): self.bar_index = np.arange(coord.points.size + 1) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.pcolormesh") + self.mpl_patch = mocker.patch("matplotlib.pyplot.pcolormesh") self.draw_func = qplt.outline - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_pcolor.py b/lib/iris/tests/unit/quickplot/test_pcolor.py index fc2ce83f0b..87d9b73530 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolor.py +++ b/lib/iris/tests/unit/quickplot/test_pcolor.py @@ -4,33 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.pcolor` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): qplt.pcolor(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): qplt.pcolor(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=True) coord = self.cube.coord("foo") @@ -41,9 +40,5 @@ def setUp(self): self.bar_index = np.arange(coord.points.size + 1) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.pcolor", return_value=None) + self.mpl_patch = mocker.patch("matplotlib.pyplot.pcolor", return_value=None) self.draw_func = qplt.pcolor - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_pcolormesh.py b/lib/iris/tests/unit/quickplot/test_pcolormesh.py index 6ce9d07406..b9476c0c16 100644 --- a/lib/iris/tests/unit/quickplot/test_pcolormesh.py +++ b/lib/iris/tests/unit/quickplot/test_pcolormesh.py @@ -4,33 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.pcolormesh` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): qplt.pcolormesh(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): qplt.pcolormesh(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=True) coord = self.cube.coord("foo") @@ -41,9 +40,5 @@ def setUp(self): self.bar_index = np.arange(coord.points.size + 1) self.data = self.cube.data self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.pcolormesh", return_value=None) + self.mpl_patch = mocker.patch("matplotlib.pyplot.pcolormesh", return_value=None) self.draw_func = qplt.pcolormesh - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_plot.py b/lib/iris/tests/unit/quickplot/test_plot.py index 35e1eae470..6f4c71a4f1 100644 --- a/lib/iris/tests/unit/quickplot/test_plot.py +++ b/lib/iris/tests/unit/quickplot/test_plot.py @@ -4,40 +4,42 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.plot` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip +import pytest + +from iris.tests import _shared_utils from iris.tests.stock import simple_1d from iris.tests.unit.plot import TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() + parent_setup = TestGraphicStringCoord._setup + + @pytest.fixture(autouse=True) + def _setup(self, parent_setup): self.cube = self.cube[0, :] def test_yaxis_labels(self): qplt.plot(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): qplt.plot(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") -class TestAxisLabels(tests.GraphicsTest): +class TestAxisLabels(_shared_utils.GraphicsTest): def test_xy_cube(self): c = simple_1d() qplt.plot(c) ax = qplt.plt.gca() x = ax.xaxis.get_label().get_text() - self.assertEqual(x, "Foo") + assert x == "Foo" y = ax.yaxis.get_label().get_text() - self.assertEqual(y, "Thingness") + assert y == "Thingness" def test_yx_cube(self): c = simple_1d() @@ -48,10 +50,6 @@ def test_yx_cube(self): qplt.plot(c) ax = qplt.plt.gca() x = ax.xaxis.get_label().get_text() - self.assertEqual(x, "Thingness") + assert x == "Thingness" y = ax.yaxis.get_label().get_text() - self.assertEqual(y, "Foo") - - -if __name__ == "__main__": - tests.main() + assert y == "Foo" diff --git a/lib/iris/tests/unit/quickplot/test_points.py b/lib/iris/tests/unit/quickplot/test_points.py index b28c37bf87..d55b3daadf 100644 --- a/lib/iris/tests/unit/quickplot/test_points.py +++ b/lib/iris/tests/unit/quickplot/test_points.py @@ -4,33 +4,32 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.points` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import simple_2d from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): def test_yaxis_labels(self): qplt.points(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") + self.assert_bounds_tick_labels("yaxis") def test_xaxis_labels(self): qplt.points(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): +@_shared_utils.skip_plot +class TestCoords(MixinCoords): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # We have a 2d cube with dimensionality (bar: 3; foo: 4) self.cube = simple_2d(with_bounds=False) self.foo = self.cube.coord("foo").points @@ -39,9 +38,5 @@ def setUp(self): self.bar_index = np.arange(self.bar.size) self.data = None self.dataT = None - self.mpl_patch = self.patch("matplotlib.pyplot.scatter") + self.mpl_patch = mocker.patch("matplotlib.pyplot.scatter") self.draw_func = qplt.points - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_scatter.py b/lib/iris/tests/unit/quickplot/test_scatter.py index db3e9948a0..e6c7177860 100644 --- a/lib/iris/tests/unit/quickplot/test_scatter.py +++ b/lib/iris/tests/unit/quickplot/test_scatter.py @@ -4,29 +4,27 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.quickplot.scatter` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip +import pytest + +from iris.tests import _shared_utils from iris.tests.unit.plot import TestGraphicStringCoord -if tests.MPL_AVAILABLE: +if _shared_utils.MPL_AVAILABLE: import iris.quickplot as qplt -@tests.skip_plot +@_shared_utils.skip_plot class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() + parent_setup = TestGraphicStringCoord._setup + + @pytest.fixture(autouse=True) + def _setup(self, parent_setup): self.cube = self.cube[0, :] def test_xaxis_labels(self): qplt.scatter(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") + self.assert_bounds_tick_labels("xaxis") def test_yaxis_labels(self): qplt.scatter(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") - - -if __name__ == "__main__": - tests.main() + self.assert_bounds_tick_labels("yaxis") diff --git a/lib/iris/tests/unit/util/test__coord_regular.py b/lib/iris/tests/unit/util/test__coord_regular.py index a772833972..cbfe5074e6 100644 --- a/lib/iris/tests/unit/util/test__coord_regular.py +++ b/lib/iris/tests/unit/util/test__coord_regular.py @@ -12,99 +12,92 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.coords import AuxCoord, DimCoord from iris.exceptions import CoordinateMultiDimError, CoordinateNotRegularError from iris.util import is_regular, points_step, regular_step -class Test_is_regular(tests.IrisTest): +class Test_is_regular: def test_coord_with_regular_step(self): coord = DimCoord(np.arange(5)) result = is_regular(coord) - self.assertTrue(result) + assert result def test_coord_with_irregular_step(self): # Check that a `CoordinateNotRegularError` is captured. coord = AuxCoord(np.array([2, 5, 1, 4])) result = is_regular(coord) - self.assertFalse(result) + assert not result def test_scalar_coord(self): # Check that a `ValueError` is captured. coord = DimCoord(5) result = is_regular(coord) - self.assertFalse(result) + assert not result def test_coord_with_string_points(self): # Check that a `TypeError` is captured. 
coord = AuxCoord(["a", "b", "c"]) result = is_regular(coord) - self.assertFalse(result) + assert not result -class Test_regular_step(tests.IrisTest): +class Test_regular_step: def test_basic(self): dtype = np.float64 points = np.arange(5, dtype=dtype) coord = DimCoord(points) expected = np.mean(np.diff(points)) result = regular_step(coord) - self.assertEqual(expected, result) - self.assertEqual(result.dtype, dtype) + assert expected == result + assert result.dtype == dtype def test_2d_coord(self): coord = AuxCoord(np.arange(8).reshape(2, 4)) exp_emsg = "Expected 1D coord" - with self.assertRaisesRegex(CoordinateMultiDimError, exp_emsg): + with pytest.raises(CoordinateMultiDimError, match=exp_emsg): regular_step(coord) def test_scalar_coord(self): coord = DimCoord(5) exp_emsg = "non-scalar coord" - with self.assertRaisesRegex(ValueError, exp_emsg): + with pytest.raises(ValueError, match=exp_emsg): regular_step(coord) def test_coord_with_irregular_step(self): name = "latitude" coord = AuxCoord(np.array([2, 5, 1, 4]), standard_name=name) exp_emsg = "{} is not regular".format(name) - with self.assertRaisesRegex(CoordinateNotRegularError, exp_emsg): + with pytest.raises(CoordinateNotRegularError, match=exp_emsg): regular_step(coord) -class Test_points_step(tests.IrisTest): +class Test_points_step: def test_regular_points(self): regular_points = np.arange(5) exp_avdiff = np.mean(np.diff(regular_points)) result_avdiff, result = points_step(regular_points) - self.assertEqual(exp_avdiff, result_avdiff) - self.assertTrue(result) + assert exp_avdiff == result_avdiff + assert result def test_irregular_points(self): irregular_points = np.array([2, 5, 1, 4]) exp_avdiff = np.mean(np.diff(irregular_points)) result_avdiff, result = points_step(irregular_points) - self.assertEqual(exp_avdiff, result_avdiff) - self.assertFalse(result) + assert exp_avdiff == result_avdiff + assert not result def test_single_point(self): lone_point = np.array([4]) result_avdiff, result = points_step(lone_point) - self.assertTrue(np.isnan(result_avdiff)) - self.assertTrue(result) + assert np.isnan(result_avdiff) + assert result def test_no_points(self): no_points = np.array([]) result_avdiff, result = points_step(no_points) - self.assertTrue(np.isnan(result_avdiff)) - self.assertTrue(result) - - -if __name__ == "__main__": - tests.main() + assert np.isnan(result_avdiff) + assert result diff --git a/lib/iris/tests/unit/util/test__is_circular.py b/lib/iris/tests/unit/util/test__is_circular.py index 67099f49d6..8dc7d6f2a4 100644 --- a/lib/iris/tests/unit/util/test__is_circular.py +++ b/lib/iris/tests/unit/util/test__is_circular.py @@ -4,24 +4,16 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util._is_circular`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np from iris.util import _is_circular -class Test(tests.IrisTest): +class Test: def test_simple(self): data = np.arange(12) * 30 - self.assertTrue(_is_circular(data, 360)) + assert _is_circular(data, 360) def test_negative_diff(self): data = (np.arange(96) * -3.749998) + 3.56249908e02 - self.assertTrue(_is_circular(data, 360)) - - -if __name__ == "__main__": - tests.main() + assert _is_circular(data, 360) diff --git a/lib/iris/tests/unit/util/test__mask_array.py b/lib/iris/tests/unit/util/test__mask_array.py index 355730c166..990c17f3be 100644 --- a/lib/iris/tests/unit/util/test__mask_array.py +++ b/lib/iris/tests/unit/util/test__mask_array.py @@ -34,7 +34,7 @@ ) @pytest.mark.parametrize("lazy_mask", [False, True], ids=["real", "lazy"]) @pytest.mark.parametrize( - "array, expected", array_choices, ids=["plain-array", "masked-array"] + ("array", "expected"), array_choices, ids=["plain-array", "masked-array"] ) @pytest.mark.parametrize("lazy_array", [False, True], ids=["real", "lazy"]) def test_1d_not_in_place(array, mask, expected, lazy_array, lazy_mask): diff --git a/lib/iris/tests/unit/util/test__slice_data_with_keys.py b/lib/iris/tests/unit/util/test__slice_data_with_keys.py index eda4f91055..fd692e2076 100644 --- a/lib/iris/tests/unit/util/test__slice_data_with_keys.py +++ b/lib/iris/tests/unit/util/test__slice_data_with_keys.py @@ -12,13 +12,11 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris._lazy_data import as_concrete_data, as_lazy_data +from iris.tests import _shared_utils from iris.util import _slice_data_with_keys @@ -77,15 +75,12 @@ def showkeys(keys_list): msg += "\n]" return msg - self.assertTrue( - equal, - errmsg.format(showkeys(calls_got), showkeys(expect_call_keys)), - ) + assert equal, errmsg.format(showkeys(calls_got), showkeys(expect_call_keys)) if expect_map is not None: - self.assertEqual(dim_map, expect_map) + assert dim_map == expect_map -class Test_indexing(MixinIndexingTest, tests.IrisTest): +class Test_indexing(MixinIndexingTest): # Check the indexing operations performed for various requested keys. def test_0d_nokeys(self): @@ -105,12 +100,12 @@ def test_1d_tuple(self): def test_fail_1d_2keys(self): msg = "More slices .* than dimensions" - with self.assertRaisesRegex(IndexError, msg): + with pytest.raises(IndexError, match=msg): self.check((3,), Index[1, 2]) def test_fail_empty_slice(self): msg = "Cannot index with zero length slice" - with self.assertRaisesRegex(IndexError, msg): + with pytest.raises(IndexError, match=msg): self.check((3,), Index[1:1]) def test_2d_tuple(self): @@ -192,7 +187,7 @@ def test_3d_multiple_tuples(self): # That's just what it does at present. -class Test_dimensions_mapping(MixinIndexingTest, tests.IrisTest): +class Test_dimensions_mapping(MixinIndexingTest): # Check the dimensions map returned for various requested keys. def test_1d_nochange(self): @@ -236,7 +231,7 @@ def test_3d_losedim1(self): ) -class TestResults(tests.IrisTest): +class TestResults: # Integration-style test, exercising (mostly) the same cases as above, # but checking actual results, for both real and lazy array inputs. 
@@ -246,10 +241,10 @@ def check(self, real_data, keys, expect_result, expect_map): real_dim_map, real_result = _slice_data_with_keys(real_data, keys) lazy_dim_map, lazy_result = _slice_data_with_keys(lazy_data, keys) lazy_result = as_concrete_data(lazy_result) - self.assertArrayEqual(real_result, expect_result) - self.assertArrayEqual(lazy_result, expect_result) - self.assertEqual(real_dim_map, expect_map) - self.assertEqual(lazy_dim_map, expect_map) + _shared_utils.assert_array_equal(real_result, expect_result) + _shared_utils.assert_array_equal(lazy_result, expect_result) + assert real_dim_map == expect_map + assert lazy_dim_map == expect_map def test_1d_int(self): self.check([1, 2, 3, 4], Index[2], [3], {None: None, 0: None}) @@ -262,12 +257,12 @@ def test_1d_tuple(self): def test_fail_1d_2keys(self): msg = "More slices .* than dimensions" - with self.assertRaisesRegex(IndexError, msg): + with pytest.raises(IndexError, match=msg): self.check([1, 2, 3], Index[1, 2], None, None) def test_fail_empty_slice(self): msg = "Cannot index with zero length slice" - with self.assertRaisesRegex(IndexError, msg): + with pytest.raises(IndexError, match=msg): self.check([1, 2, 3], Index[1:1], None, None) def test_2d_tuple(self): @@ -418,7 +413,3 @@ def test_3d_multiple_tuples(self): ) # NOTE: there seem to be an extra initial [:, :, :]. # That's just what it does at present. - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py index d463ca6a4f..3e1aaf1bfb 100644 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ b/lib/iris/tests/unit/util/test_array_equal.py @@ -4,141 +4,133 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.array_equal`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np import numpy.ma as ma from iris.util import array_equal -class Test(tests.IrisTest): +class Test: def test_0d(self): array_a = np.array(23) array_b = np.array(23) array_c = np.array(7) - self.assertTrue(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_a, array_c)) + assert array_equal(array_a, array_b) + assert not array_equal(array_a, array_c) def test_0d_and_scalar(self): array_a = np.array(23) - self.assertTrue(array_equal(array_a, 23)) - self.assertFalse(array_equal(array_a, 45)) + assert array_equal(array_a, 23) + assert not array_equal(array_a, 45) def test_1d_and_sequences(self): for sequence_type in (list, tuple): seq_a = sequence_type([1, 2, 3]) array_a = np.array(seq_a) - self.assertTrue(array_equal(array_a, seq_a)) - self.assertFalse(array_equal(array_a, seq_a[:-1])) + assert array_equal(array_a, seq_a) + assert not array_equal(array_a, seq_a[:-1]) array_a[1] = 45 - self.assertFalse(array_equal(array_a, seq_a)) + assert not array_equal(array_a, seq_a) def test_nd(self): array_a = np.array(np.arange(24).reshape(2, 3, 4)) array_b = np.array(np.arange(24).reshape(2, 3, 4)) array_c = np.array(np.arange(24).reshape(2, 3, 4)) array_c[0, 1, 2] = 100 - self.assertTrue(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_a, array_c)) + assert array_equal(array_a, array_b) + assert not array_equal(array_a, array_c) def test_masked_is_not_ignored(self): array_a = ma.masked_array([1, 2, 3], mask=[1, 0, 1]) array_b = ma.masked_array([2, 2, 2], mask=[1, 0, 1]) - self.assertTrue(array_equal(array_a, array_b)) + assert 
array_equal(array_a, array_b) def test_masked_is_different(self): array_a = ma.masked_array([1, 2, 3], mask=[1, 0, 1]) array_b = ma.masked_array([1, 2, 3], mask=[0, 0, 1]) - self.assertFalse(array_equal(array_a, array_b)) + assert not array_equal(array_a, array_b) def test_masked_isnt_unmasked(self): array_a = np.array([1, 2, 2]) array_b = ma.masked_array([1, 2, 2], mask=[0, 0, 1]) - self.assertFalse(array_equal(array_a, array_b)) + assert not array_equal(array_a, array_b) def test_masked_unmasked_equivelance(self): array_a = np.array([1, 2, 2]) array_b = ma.masked_array([1, 2, 2]) array_c = ma.masked_array([1, 2, 2], mask=[0, 0, 0]) - self.assertTrue(array_equal(array_a, array_b)) - self.assertTrue(array_equal(array_a, array_c)) + assert array_equal(array_a, array_b) + assert array_equal(array_a, array_c) def test_fully_masked_arrays(self): array_a = ma.masked_array(np.arange(24).reshape(2, 3, 4), mask=True) array_b = ma.masked_array(np.arange(24).reshape(2, 3, 4), mask=True) - self.assertTrue(array_equal(array_a, array_b)) + assert array_equal(array_a, array_b) def test_fully_masked_0d_arrays(self): array_a = ma.masked_array(3, mask=True) array_b = ma.masked_array(3, mask=True) - self.assertTrue(array_equal(array_a, array_b)) + assert array_equal(array_a, array_b) def test_fully_masked_string_arrays(self): array_a = ma.masked_array(["a", "b", "c"], mask=True) array_b = ma.masked_array(["a", "b", "c"], mask=[1, 1, 1]) - self.assertTrue(array_equal(array_a, array_b)) + assert array_equal(array_a, array_b) def test_partially_masked_string_arrays(self): array_a = ma.masked_array(["a", "b", "c"], mask=[1, 0, 1]) array_b = ma.masked_array(["a", "b", "c"], mask=[1, 0, 1]) - self.assertTrue(array_equal(array_a, array_b)) + assert array_equal(array_a, array_b) def test_string_arrays_equal(self): array_a = np.array(["abc", "def", "efg"]) array_b = np.array(["abc", "def", "efg"]) - self.assertTrue(array_equal(array_a, array_b)) + assert array_equal(array_a, array_b) def test_string_arrays_different_contents(self): array_a = np.array(["abc", "def", "efg"]) array_b = np.array(["abc", "de", "efg"]) - self.assertFalse(array_equal(array_a, array_b)) + assert not array_equal(array_a, array_b) def test_string_arrays_subset(self): array_a = np.array(["abc", "def", "efg"]) array_b = np.array(["abc", "def"]) - self.assertFalse(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_b, array_a)) + assert not array_equal(array_a, array_b) + assert not array_equal(array_b, array_a) def test_string_arrays_unequal_dimensionality(self): array_a = np.array("abc") array_b = np.array(["abc"]) array_c = np.array([["abc"]]) - self.assertFalse(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_b, array_a)) - self.assertFalse(array_equal(array_a, array_c)) - self.assertFalse(array_equal(array_b, array_c)) + assert not array_equal(array_a, array_b) + assert not array_equal(array_b, array_a) + assert not array_equal(array_a, array_c) + assert not array_equal(array_b, array_c) def test_string_arrays_0d_and_scalar(self): array_a = np.array("foobar") - self.assertTrue(array_equal(array_a, "foobar")) - self.assertFalse(array_equal(array_a, "foo")) - self.assertFalse(array_equal(array_a, "foobar.")) + assert array_equal(array_a, "foobar") + assert not array_equal(array_a, "foo") + assert not array_equal(array_a, "foobar.") def test_nan_equality_nan_ne_nan(self): array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) array_b = array_a.copy() - self.assertFalse(array_equal(array_a, array_a)) - 
self.assertFalse(array_equal(array_a, array_b)) + assert not array_equal(array_a, array_a) + assert not array_equal(array_a, array_b) def test_nan_equality_nan_naneq_nan(self): array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) array_b = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - self.assertTrue(array_equal(array_a, array_a, withnans=True)) - self.assertTrue(array_equal(array_a, array_b, withnans=True)) + assert array_equal(array_a, array_a, withnans=True) + assert array_equal(array_a, array_b, withnans=True) def test_nan_equality_nan_nanne_a(self): array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) array_b = np.array([1.0, np.nan, 2.0, 0.0, 3.0]) - self.assertFalse(array_equal(array_a, array_b, withnans=True)) + assert not array_equal(array_a, array_b, withnans=True) def test_nan_equality_a_nanne_b(self): array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) array_b = np.array([1.0, np.nan, 2.0, np.nan, 4.0]) - self.assertFalse(array_equal(array_a, array_b, withnans=True)) - - -if __name__ == "__main__": - tests.main() + assert not array_equal(array_a, array_b, withnans=True) diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py index 183f6f8d93..75dd2c6f97 100644 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ b/lib/iris/tests/unit/util/test_broadcast_to_shape.py @@ -4,10 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.broadcast_to_shape`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - from unittest import mock import dask @@ -15,17 +11,18 @@ import numpy as np import numpy.ma as ma +from iris.tests import _shared_utils from iris.util import broadcast_to_shape -class Test_broadcast_to_shape(tests.IrisTest): +class Test_broadcast_to_shape: def test_same_shape(self): # broadcast to current shape should result in no change rng = np.random.default_rng() a = rng.random((2, 3)) b = broadcast_to_shape(a, a.shape, (0, 1)) - self.assertArrayEqual(b, a) + _shared_utils.assert_array_equal(b, a) def test_added_dimensions(self): # adding two dimensions, on at the front and one in the middle of @@ -35,7 +32,7 @@ def test_added_dimensions(self): b = broadcast_to_shape(a, (5, 2, 4, 3), (1, 3)) for i in range(5): for j in range(4): - self.assertArrayEqual(b[i, :, j, :], a) + _shared_utils.assert_array_equal(b[i, :, j, :], a) def test_added_dimensions_transpose(self): # adding dimensions and having the dimensions of the input @@ -45,7 +42,7 @@ def test_added_dimensions_transpose(self): b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1)) for i in range(5): for j in range(4): - self.assertArrayEqual(b[i, :, j, :].T, a) + _shared_utils.assert_array_equal(b[i, :, j, :].T, a) @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) def test_lazy_added_dimensions_transpose(self, mocked_compute): @@ -57,7 +54,7 @@ def test_lazy_added_dimensions_transpose(self, mocked_compute): mocked_compute.assert_not_called() for i in range(5): for j in range(4): - self.assertArrayEqual(b[i, :, j, :].T.compute(), a.compute()) + _shared_utils.assert_array_equal(b[i, :, j, :].T.compute(), a.compute()) def test_masked(self): # masked arrays are also accepted @@ -67,7 +64,7 @@ def test_masked(self): b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) for i in range(5): for j in range(4): - self.assertMaskedArrayEqual(b[i, :, j, :].T, m) + _shared_utils.assert_masked_array_equal(b[i, :, j, 
:].T, m) @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) def test_lazy_masked(self, mocked_compute): @@ -79,7 +76,9 @@ def test_lazy_masked(self, mocked_compute): mocked_compute.assert_not_called() for i in range(5): for j in range(4): - self.assertMaskedArrayEqual(b[i, :, j, :].compute().T, m.compute()) + _shared_utils.assert_masked_array_equal( + b[i, :, j, :].compute().T, m.compute() + ) @mock.patch.object(dask.base, "compute", wraps=dask.base.compute) def test_lazy_chunks(self, mocked_compute): @@ -103,7 +102,9 @@ def test_lazy_chunks(self, mocked_compute): mocked_compute.assert_not_called() for i in range(3): for j in range(4): - self.assertMaskedArrayEqual(b[i, j, :].compute(), m[0].compute()) + _shared_utils.assert_masked_array_equal( + b[i, j, :].compute(), m[0].compute() + ) assert b.chunks == ((1, 1, 1), (2, 2), (2, 2, 1)) def test_masked_degenerate(self): @@ -114,8 +115,4 @@ def test_masked_degenerate(self): b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) for i in range(5): for j in range(4): - self.assertMaskedArrayEqual(b[i, :, j, :].T, m) - - -if __name__ == "__main__": - tests.main() + _shared_utils.assert_masked_array_equal(b[i, :, j, :].T, m) diff --git a/lib/iris/tests/unit/util/test_column_slices_generator.py b/lib/iris/tests/unit/util/test_column_slices_generator.py index fbb5a8f588..3dae8f72b5 100644 --- a/lib/iris/tests/unit/util/test_column_slices_generator.py +++ b/lib/iris/tests/unit/util/test_column_slices_generator.py @@ -4,32 +4,17 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.column_slices_generator`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.util import column_slices_generator -class Test_int_types(tests.IrisTest): - def _test(self, key): +class Test_int_types: + @pytest.mark.parametrize("key", [0, np.int32(0), np.int64(0)]) + def test(self, key): full_slice = (key,) ndims = 1 mapping, iterable = column_slices_generator(full_slice, ndims) - self.assertEqual(mapping, {0: None, None: None}) - self.assertEqual(list(iterable), [(0,)]) - - def test_int(self): - self._test(0) - - def test_int_32(self): - self._test(np.int32(0)) - - def test_int_64(self): - self._test(np.int64(0)) - - -if __name__ == "__main__": - tests.main() + assert mapping == {0: None, None: None} + assert list(iterable) == [(0,)] diff --git a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py index 65e3dec93b..2e06a75fc7 100644 --- a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py +++ b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py @@ -4,51 +4,47 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.demote_dim_coord_to_aux_coord`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import unittest +import pytest + import iris +from iris.tests import _shared_utils import iris.tests.stock as stock from iris.util import demote_dim_coord_to_aux_coord -class Test(tests.IrisTest): +class Test: def test_argument_is_basestring(self): cube_a = stock.simple_3d() cube_b = cube_a.copy() demote_dim_coord_to_aux_coord(cube_b, cube_b.coord("wibble")) - self.assertEqual( - cube_b.dim_coords, - (cube_a.coord("latitude"), cube_a.coord("longitude")), + assert cube_b.dim_coords == ( + cube_a.coord("latitude"), + cube_a.coord("longitude"), ) - @tests.skip_data + @_shared_utils.skip_data def test_argument_is_coord_instance(self): cube_a = stock.realistic_4d() cube_b = cube_a.copy() coord = cube_b.coord("model_level_number").copy() demote_dim_coord_to_aux_coord(cube_b, coord) - self.assertEqual( - cube_b.dim_coords, - ( - cube_a.coord("time"), - cube_a.coord("grid_latitude"), - cube_a.coord("grid_longitude"), - ), + assert cube_b.dim_coords == ( + cube_a.coord("time"), + cube_a.coord("grid_latitude"), + cube_a.coord("grid_longitude"), ) def test_old_dim_coord_is_now_aux_coord(self): cube_a = stock.hybrid_height() cube_b = cube_a.copy() demote_dim_coord_to_aux_coord(cube_b, "level_height") - self.assertTrue(cube_a.coord("level_height") in cube_b.aux_coords) + assert cube_a.coord("level_height") in cube_b.aux_coords def test_coord_of_that_name_does_not_exist(self): cube_a = stock.simple_2d_w_multidim_and_scalars() - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): + with pytest.raises(iris.exceptions.CoordinateNotFoundError): demote_dim_coord_to_aux_coord(cube_a, "wibble") def test_coord_does_not_exist(self): @@ -57,18 +53,18 @@ def test_coord_does_not_exist(self): coord = cube_b.coord("dim1").copy() coord.rename("new") demote_dim_coord_to_aux_coord(cube_b, coord) - self.assertEqual(cube_a, cube_b) + assert cube_a == cube_b def test_argument_is_wrong_type(self): cube_a = stock.simple_1d() - with self.assertRaises(TypeError): + with pytest.raises(TypeError): demote_dim_coord_to_aux_coord(cube_a, 0.0) def test_trying_to_demote_a_scalar_coord(self): cube_a = stock.simple_2d_w_multidim_and_scalars() cube_b = cube_a.copy() demote_dim_coord_to_aux_coord(cube_b, "an_other") - self.assertEqual(cube_a, cube_b) + assert cube_a == cube_b if __name__ == "__main__": diff --git a/lib/iris/tests/unit/util/test_describe_diff.py b/lib/iris/tests/unit/util/test_describe_diff.py index 74bd71389e..0263a0de27 100644 --- a/lib/iris/tests/unit/util/test_describe_diff.py +++ b/lib/iris/tests/unit/util/test_describe_diff.py @@ -4,20 +4,20 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.describe_diff`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - from io import StringIO import numpy as np +import pytest import iris.cube +from iris.tests import _shared_utils from iris.util import describe_diff -class Test(iris.tests.IrisTest): - def setUp(self): +class Test: + @pytest.fixture(autouse=True) + def _setup(self, request): + self.request = request self.cube_a = iris.cube.Cube([]) self.cube_b = self.cube_a.copy() @@ -30,21 +30,26 @@ def test_noncommon_array_attributes(self): # test non-common array attribute self.cube_a.attributes["test_array"] = np.array([1, 2, 3]) return_str = self._compare_result(self.cube_a, self.cube_b) - self.assertString(return_str, ["compatible_cubes.str.txt"]) + _shared_utils.assert_string( + self.request, return_str, ["compatible_cubes.str.txt"] + ) def test_same_array_attributes(self): # test matching array attribute self.cube_a.attributes["test_array"] = np.array([1, 2, 3]) self.cube_b.attributes["test_array"] = np.array([1, 2, 3]) return_str = self._compare_result(self.cube_a, self.cube_b) - self.assertString(return_str, ["compatible_cubes.str.txt"]) + _shared_utils.assert_string( + self.request, return_str, ["compatible_cubes.str.txt"] + ) def test_different_array_attributes(self): # test non-matching array attribute self.cube_a.attributes["test_array"] = np.array([1, 2, 3]) self.cube_b.attributes["test_array"] = np.array([1, 7, 3]) return_str = self._compare_result(self.cube_a, self.cube_b) - self.assertString( + _shared_utils.assert_string( + self.request, return_str, [ "unit", @@ -53,7 +58,3 @@ def test_different_array_attributes(self): "incompatible_array_attrs.str.txt", ], ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py index 9b09c84dd4..1392f9cff8 100644 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ b/lib/iris/tests/unit/util/test_equalise_attributes.py @@ -4,14 +4,12 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.util.equalise_attributes` function.""" -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.coords import AuxCoord from iris.cube import Cube, CubeAttrsDict +from iris.tests import _shared_utils import iris.tests.stock from iris.tests.unit.common.metadata.test_CubeMetadata import ( _TEST_ATTRNAME, @@ -20,8 +18,9 @@ from iris.util import equalise_attributes -class TestEqualiseAttributes(tests.IrisTest): - def setUp(self): +class TestEqualiseAttributes: + @pytest.fixture(autouse=True) + def _setup(self): empty = Cube([]) self.cube_no_attrs = empty.copy() @@ -66,21 +65,25 @@ def _test(self, cubes, expect_attributes, expect_removed): # Exercise basic operation actual_removed = equalise_attributes(working_cubes) # Check they are the same cubes - self.assertEqual(working_cubes, original_working_list) + assert working_cubes == original_working_list # Check resulting attributes all match the expected set for cube in working_cubes: - self.assertEqual(cube.attributes, expect_attributes) + assert cube.attributes == expect_attributes # Check removed attributes all match as expected - self.assertEqual(len(actual_removed), len(expect_removed)) + assert len(actual_removed) == len(expect_removed) for actual, expect in zip(actual_removed, expect_removed): - self.assertEqual(actual, expect) + if isinstance(actual, dict): + _shared_utils.assert_dict_equal(actual, expect) + else: + _shared_utils.assert_array_equal(actual, expect) + # Check everything else remains the same for new_cube, old_cube in zip(working_cubes, cubes): cube_before_noatts = old_cube.copy() cube_before_noatts.attributes.clear() cube_after_noatts = new_cube.copy() cube_after_noatts.attributes.clear() - self.assertEqual(cube_after_noatts, cube_before_noatts) + assert cube_after_noatts == cube_before_noatts def test_no_attrs(self): cubes = [self.cube_no_attrs] @@ -126,7 +129,7 @@ def test_array_same(self): cubes = [self.cube_a1b5v1, self.cube_a1b6v1] self._test(cubes, {"a": 1, "v": self.v1}, [{"b": 5}, {"b": 6}]) - @tests.skip_data + @_shared_utils.skip_data def test_complex_nonecommon(self): # Example with cell methods and factories, but no common attributes. cubes = [ @@ -136,7 +139,7 @@ def test_complex_nonecommon(self): removed = cubes[0].attributes.copy() self._test(cubes, {}, [removed, {}]) - @tests.skip_data + @_shared_utils.skip_data def test_complex_somecommon(self): # Example with cell methods and factories, plus some common attributes. cubes = [iris.tests.stock.global_pp(), iris.tests.stock.simple_pp()] @@ -250,7 +253,3 @@ def test(self): ] equalise_attributes(coords) assert all(coord.attributes == {"b": "all_the_same"} for coord in coords) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index 567b2a1439..bba3f1fe37 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -4,28 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.test_file_is_newer`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import os import os.path -import shutil -import tempfile + +import pytest from iris.util import file_is_newer_than -class TestFileIsNewer(tests.IrisTest): +class TestFileIsNewer: """Test the :func:`iris.util.file_is_newer_than` function.""" def _name2path(self, filename): """Add the temporary dirpath to a filename to make a full path.""" return os.path.join(self.temp_dir, filename) - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self, tmp_path): # make a temporary directory with testfiles of known timestamp order. - self.temp_dir = tempfile.mkdtemp("_testfiles_tempdir") + self.temp_dir = tmp_path / "_testfiles_tempdir" + self.temp_dir.mkdir() # define the names of some files to create create_file_names = [ "older_source_1", @@ -44,10 +42,6 @@ def setUp(self): mtime += 5.0 + 10.0 * i_file os.utime(file_path, (mtime, mtime)) - def tearDown(self): - # destroy whole contents of temporary directory - shutil.rmtree(self.temp_dir) - def _test(self, boolean_result, result_name, source_names): """Test expected result of executing with given args.""" # Make args into full paths @@ -57,7 +51,7 @@ def _test(self, boolean_result, result_name, source_names): else: source_paths = [self._name2path(name) for name in source_names] # Check result is as expected. - self.assertEqual(boolean_result, file_is_newer_than(result_path, source_paths)) + assert boolean_result == file_is_newer_than(result_path, source_paths) def test_no_sources(self): self._test(True, "example_result", []) @@ -95,28 +89,22 @@ def test_wild_fail(self): self._test(False, "example_result", ["older_sour*", "newer_sour*"]) def test_error_missing_result(self): - with self.assertRaises(OSError) as error_trap: + with pytest.raises(OSError) as error_trap: self._test(False, "non_exist", ["older_sour*"]) - error = error_trap.exception - self.assertEqual(error.strerror, "No such file or directory") - self.assertEqual(error.filename, self._name2path("non_exist")) + error = error_trap.value + assert error.strerror == "No such file or directory" + assert error.filename == self._name2path("non_exist") def test_error_missing_source(self): - with self.assertRaises(IOError) as error_trap: + with pytest.raises(IOError) as error_trap: self._test(False, "example_result", ["older_sour*", "non_exist"]) - self.assertIn( - "One or more of the files specified did not exist", - str(error_trap.exception), + assert ( + "One or more of the files specified did not exist" in error_trap.exconly() ) def test_error_missing_wild(self): - with self.assertRaises(IOError) as error_trap: + with pytest.raises(IOError) as error_trap: self._test(False, "example_result", ["older_sour*", "unknown_*"]) - self.assertIn( - "One or more of the files specified did not exist", - str(error_trap.exception), + assert ( + "One or more of the files specified did not exist" in error_trap.exconly() ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_find_discontiguities.py b/lib/iris/tests/unit/util/test_find_discontiguities.py index e3e824e442..6623121317 100644 --- a/lib/iris/tests/unit/util/test_find_discontiguities.py +++ b/lib/iris/tests/unit/util/test_find_discontiguities.py @@ -4,12 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.find_discontiguities.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest +from iris.tests import _shared_utils from iris.tests.stock import ( make_bounds_discontiguous_at_point, sample_2d_latlons, @@ -22,9 +20,10 @@ def full2d_global(): return sample_2d_latlons(transformed=True) -@tests.skip_data -class Test(tests.IrisTest): - def setUp(self): +@_shared_utils.skip_data +class Test: + @pytest.fixture(autouse=True) + def _setup(self): # Set up a 2d lat-lon cube with 2d coordinates that have been # transformed so they are not in a regular lat-lon grid. # Then generate a discontiguity at a single lat-lon point. @@ -53,7 +52,7 @@ def test_find_discontiguities_right(self): cube = self.testcube_discontig_right expected = cube.data.mask returned = find_discontiguities(cube) - self.assertTrue(np.all(expected == returned)) + assert np.all(expected == returned) def test_find_discontiguities_left(self): # Check that the mask we generate when making the discontiguity @@ -61,7 +60,7 @@ def test_find_discontiguities_left(self): cube = self.testcube_discontig_left expected = cube.data.mask returned = find_discontiguities(cube) - self.assertTrue(np.all(expected == returned)) + assert np.all(expected == returned) def test_find_discontiguities_top(self): # Check that the mask we generate when making the discontiguity @@ -69,7 +68,7 @@ def test_find_discontiguities_top(self): cube = self.testcube_discontig_top expected = cube.data.mask returned = find_discontiguities(cube) - self.assertTrue(np.all(expected == returned)) + assert np.all(expected == returned) def test_find_discontiguities_bottom(self): # Check that the mask we generate when making the discontiguity @@ -77,13 +76,13 @@ def test_find_discontiguities_bottom(self): cube = self.testcube_discontig_along_bottom expected = cube.data.mask returned = find_discontiguities(cube) - self.assertTrue(np.all(expected == returned)) + assert np.all(expected == returned) def test_find_discontiguities_1d_coord(self): # Check that an error is raised when we try and use # find_discontiguities on 1D coordinates: cube = simple_3d() - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): find_discontiguities(cube) def test_find_discontiguities_with_atol(self): @@ -95,7 +94,7 @@ def test_find_discontiguities_with_atol(self): # to represent a mask showing no discontiguities expected = np.zeros(cube.shape, dtype=bool) returned = find_discontiguities(cube, abs_tol=atol) - self.assertTrue(np.all(expected == returned)) + assert np.all(expected == returned) def test_find_discontiguities_with_rtol(self): cube = self.testcube_discontig_right @@ -106,8 +105,4 @@ def test_find_discontiguities_with_rtol(self): # to represent a mask showing no discontiguities expected = np.zeros(cube.shape, dtype=bool) returned = find_discontiguities(cube, rel_tol=rtol) - self.assertTrue(np.all(expected == returned)) - - -if __name__ == "__main__": - tests.main() + assert np.all(expected == returned) diff --git a/lib/iris/tests/unit/util/test_guess_coord_axis.py b/lib/iris/tests/unit/util/test_guess_coord_axis.py index d946565196..5f4d60883e 100644 --- a/lib/iris/tests/unit/util/test_guess_coord_axis.py +++ b/lib/iris/tests/unit/util/test_guess_coord_axis.py @@ -11,7 +11,7 @@ class TestGuessCoord: @pytest.mark.parametrize( - "coordinate, axis", + ("coordinate", "axis"), [ ("longitude", "X"), 
("grid_longitude", "X"), @@ -26,7 +26,7 @@ def test_coord(self, coordinate, axis, sample_coord): assert guess_coord_axis(sample_coord) == axis @pytest.mark.parametrize( - "units, axis", + ("units", "axis"), [ ("hPa", "Z"), ("days since 1970-01-01 00:00:00", "T"), @@ -37,7 +37,7 @@ def test_units(self, units, axis, sample_coord): assert guess_coord_axis(sample_coord) == axis @pytest.mark.parametrize( - "ignore_axis, result", + ("ignore_axis", "result"), [ (True, None), (False, "X"), diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py index 47f2774b95..29fd2785a8 100644 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ b/lib/iris/tests/unit/util/test_mask_cube.py @@ -4,16 +4,13 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.mask_cube.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import pathlib import dask.array as da -import numpy as np import numpy.ma as ma +import pytest +from iris.tests import _shared_utils from iris.tests.stock import ( make_bounds_discontiguous_at_point, sample_2d_latlons, @@ -29,24 +26,30 @@ def full2d_global(): class MaskCubeMixin: - def assertOriginalMetadata(self, cube, func): + @pytest.fixture(autouse=True) + def _get_request(self, request): + self.request = request + + def assert_original_metadata(self, cube, func): """Check metadata matches that of input cube. func is a string indicating which function created the original cube. """ reference_dir = pathlib.Path("unit/util/mask_cube") reference_fname = reference_dir / f"original_cube_{func}.cml" - self.assertCML( + _shared_utils.assert_CML( + self.request, cube, reference_filename=str(reference_fname), checksum=False, ) -class TestArrayMask(tests.IrisTest, MaskCubeMixin): +class TestArrayMask(MaskCubeMixin): """Tests with mask specified as numpy array.""" - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): # Set up a 2d cube with a masked discontiguity to test masking # of 2-dimensional cubes self.cube_2d = full2d_global() @@ -63,9 +66,9 @@ def test_mask_cube_2d_in_place(self): # comparing with masked data cube.data = cube.data.data returned = mask_cube(cube, discontiguity_array, in_place=True) - np.testing.assert_array_equal(expected.data.mask, cube.data.mask) - self.assertOriginalMetadata(cube, "full2d_global") - self.assertIs(returned, None) + _shared_utils.assert_array_equal(expected.data.mask, cube.data.mask) + self.assert_original_metadata(cube, "full2d_global") + assert returned is None def test_mask_cube_2d_not_in_place(self): # This tests the masking of a 2d data array @@ -78,29 +81,30 @@ def test_mask_cube_2d_not_in_place(self): # comparing with masked data cube.data = cube.data.data returned = mask_cube(cube, discontiguity_array, in_place=False) - np.testing.assert_array_equal(expected.data.mask, returned.data.mask) - self.assertOriginalMetadata(returned, "full2d_global") - self.assertFalse(ma.is_masked(cube.data)) + _shared_utils.assert_array_equal(expected.data.mask, returned.data.mask) + self.assert_original_metadata(returned, "full2d_global") + assert not ma.is_masked(cube.data) def test_mask_cube_lazy_in_place_broadcast(self): cube = simple_2d() cube.data = cube.lazy_data() mask = [0, 1, 1, 0] returned = mask_cube(cube, mask, in_place=True) - self.assertTrue(cube.has_lazy_data()) + assert cube.has_lazy_data() # Touch the data so lazyness status doesn't affect CML 
check. cube.data - self.assertOriginalMetadata(cube, "simple_2d") + self.assert_original_metadata(cube, "simple_2d") for subcube in cube.slices("foo"): # Mask should have been broadcast across "bar" dimension. - np.testing.assert_array_equal(subcube.data.mask, mask) - self.assertIs(returned, None) + _shared_utils.assert_array_equal(subcube.data.mask, mask) + assert returned is None -class TestCoordMask(tests.IrisTest, MaskCubeMixin): +class TestCoordMask(MaskCubeMixin): """Tests with mask specified as a Coord.""" - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = simple_2d() def test_mask_cube_2d_first_dim(self): @@ -110,24 +114,25 @@ def test_mask_cube_2d_first_dim(self): returned = mask_cube(self.cube, mask_coord, in_place=False) # Remove extra coord so we can check against original metadata. returned.remove_coord(mask_coord) - self.assertOriginalMetadata(returned, "simple_2d") + self.assert_original_metadata(returned, "simple_2d") for subcube in returned.slices("bar"): # Mask should have been broadcast across "foo" dimension. - np.testing.assert_array_equal(subcube.data.mask, mask_coord.points) + _shared_utils.assert_array_equal(subcube.data.mask, mask_coord.points) def test_mask_cube_2d_second_dim(self): mask_coord = iris.coords.AuxCoord([0, 0, 1, 1], long_name="mask", units=1) returned = mask_cube(self.cube, mask_coord, in_place=False, dim=1) - self.assertOriginalMetadata(returned, "simple_2d") + self.assert_original_metadata(returned, "simple_2d") for subcube in returned.slices("foo"): # Mask should have been broadcast across "bar" dimension. - np.testing.assert_array_equal(subcube.data.mask, mask_coord.points) + _shared_utils.assert_array_equal(subcube.data.mask, mask_coord.points) -class TestCubeMask(tests.IrisTest, MaskCubeMixin): +class TestCubeMask(MaskCubeMixin): """Tests with mask specified as a Cube.""" - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = simple_2d() def test_mask_cube_2d_first_dim_not_in_place(self): @@ -135,21 +140,21 @@ def test_mask_cube_2d_first_dim_not_in_place(self): mask.add_dim_coord(self.cube.coord("bar"), 0) returned = mask_cube(self.cube, mask, in_place=False) - self.assertOriginalMetadata(returned, "simple_2d") + self.assert_original_metadata(returned, "simple_2d") for subcube in returned.slices("bar"): # Mask should have been broadcast across 'foo' dimension. - np.testing.assert_array_equal(subcube.data.mask, mask.data) + _shared_utils.assert_array_equal(subcube.data.mask, mask.data) def test_mask_cube_2d_first_dim_in_place(self): mask = iris.cube.Cube([0, 1, 0], long_name="mask", units=1) mask.add_dim_coord(self.cube.coord("bar"), 0) returned = mask_cube(self.cube, mask, in_place=True) - self.assertOriginalMetadata(self.cube, "simple_2d") + self.assert_original_metadata(self.cube, "simple_2d") for subcube in self.cube.slices("bar"): # Mask should have been broadcast across 'foo' dimension. 
- np.testing.assert_array_equal(subcube.data.mask, mask.data) - self.assertIs(returned, None) + _shared_utils.assert_array_equal(subcube.data.mask, mask.data) + assert returned is None def test_mask_cube_2d_create_new_dim(self): mask = iris.cube.Cube([[0, 1, 0], [0, 0, 1]], long_name="mask", units=1) @@ -163,27 +168,23 @@ def test_mask_cube_2d_create_new_dim(self): cube = iris.util.new_axis(self.cube, "baz") returned = mask_cube(cube, mask, in_place=False) - self.assertCML(cube, checksum=False) + _shared_utils.assert_CML(self.request, cube, checksum=False) for subcube in returned.slices_over("baz"): # Underlying data should have been broadcast across 'baz' dimension. - np.testing.assert_array_equal(subcube.data, self.cube.data) + _shared_utils.assert_array_equal(subcube.data, self.cube.data) for subcube in returned.slices_over("foo"): # Mask should have been broadcast across 'foo' dimension. - np.testing.assert_array_equal(subcube.data.mask, mask.data) + _shared_utils.assert_array_equal(subcube.data.mask, mask.data) def test_mask_cube_1d_lazy_mask_in_place(self): cube = simple_1d() mask = cube.copy(da.from_array([0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1])) returned = mask_cube(cube, mask, in_place=True) - self.assertIs(returned, None) - self.assertTrue(cube.has_lazy_data()) + assert returned is None + assert cube.has_lazy_data() # Touch the data so lazyness status doesn't interfere with CML check. cube.data - self.assertOriginalMetadata(cube, "simple_1d") - np.testing.assert_array_equal(cube.data.mask, mask.data) - - -if __name__ == "__main__": - tests.main() + self.assert_original_metadata(cube, "simple_1d") + _shared_utils.assert_array_equal(cube.data.mask, mask.data) diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py index 7a03ea91aa..bdd5c5fc56 100644 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py @@ -11,15 +11,15 @@ from iris.coord_systems import RotatedGeogCS from iris.coords import DimCoord import iris.cube -import iris.tests as tests from iris.util import mask_cube_from_shapefile from iris.warnings import IrisUserWarning -class TestBasicCubeMasking(tests.IrisTest): +class TestBasicCubeMasking: """Unit tests for mask_cube_from_shapefile function.""" - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): basic_data = np.array([[1, 2, 3], [4, 8, 12]]) self.basic_cube = iris.cube.Cube(basic_data) coord = DimCoord( diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index 5ba0496854..4ade2eb61c 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -4,12 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.new_axis`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-# isort: off -import iris.tests as tests # noqa - -# isort: on import copy import numpy as np @@ -24,7 +18,7 @@ class Test: - @pytest.fixture + @pytest.fixture() def stock_cube(self): cube = stock.simple_2d_w_cell_measure_ancil_var() time = iris.coords.DimCoord([1], standard_name="time") diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py index 4631f910a9..bceffe700d 100644 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py +++ b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py @@ -4,97 +4,90 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.promote_aux_coord_to_dim_coord`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import unittest +import pytest + import iris +from iris.tests import _shared_utils import iris.tests.stock as stock from iris.util import promote_aux_coord_to_dim_coord -class Test(tests.IrisTest): +class Test: def test_dimension_already_has_dimcoord(self): cube_a = stock.hybrid_height() cube_b = cube_a.copy() promote_aux_coord_to_dim_coord(cube_b, "model_level_number") - self.assertEqual(cube_b.dim_coords, (cube_a.coord("model_level_number"),)) + assert cube_b.dim_coords == (cube_a.coord("model_level_number"),) def test_old_dim_coord_is_now_aux_coord(self): cube_a = stock.hybrid_height() cube_b = cube_a.copy() promote_aux_coord_to_dim_coord(cube_b, "model_level_number") - self.assertTrue(cube_a.coord("level_height") in cube_b.aux_coords) + assert cube_a.coord("level_height") in cube_b.aux_coords - @tests.skip_data + @_shared_utils.skip_data def test_argument_is_coord_instance(self): cube_a = stock.realistic_4d() cube_b = cube_a.copy() promote_aux_coord_to_dim_coord(cube_b, cube_b.coord("level_height")) - self.assertEqual( - cube_b.dim_coords, - ( - cube_a.coord("time"), - cube_a.coord("level_height"), - cube_a.coord("grid_latitude"), - cube_a.coord("grid_longitude"), - ), + assert cube_b.dim_coords == ( + cube_a.coord("time"), + cube_a.coord("level_height"), + cube_a.coord("grid_latitude"), + cube_a.coord("grid_longitude"), ) - @tests.skip_data + @_shared_utils.skip_data def test_dimension_is_anonymous(self): cube_a = stock.realistic_4d() cube_b = cube_a.copy() cube_b.remove_coord("model_level_number") promote_aux_coord_to_dim_coord(cube_b, "level_height") - self.assertEqual( - cube_b.dim_coords, - ( - cube_a.coord("time"), - cube_a.coord("level_height"), - cube_a.coord("grid_latitude"), - cube_a.coord("grid_longitude"), - ), + assert cube_b.dim_coords == ( + cube_a.coord("time"), + cube_a.coord("level_height"), + cube_a.coord("grid_latitude"), + cube_a.coord("grid_longitude"), ) def test_already_a_dim_coord(self): cube_a = stock.simple_2d_w_multidim_and_scalars() cube_b = cube_a.copy() promote_aux_coord_to_dim_coord(cube_b, "dim1") - self.assertEqual(cube_a, cube_b) + assert cube_a == cube_b def test_coord_of_that_name_does_not_exist(self): cube_a = stock.simple_2d_w_multidim_and_scalars() - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): + with pytest.raises(iris.exceptions.CoordinateNotFoundError): promote_aux_coord_to_dim_coord(cube_a, "wibble") def test_coord_does_not_exist(self): cube_a = stock.simple_2d_w_multidim_and_scalars() coord = cube_a.coord("dim1").copy() coord.rename("new") - with self.assertRaises(ValueError): + with pytest.raises(ValueError): 
promote_aux_coord_to_dim_coord(cube_a, coord) def test_argument_is_wrong_type(self): cube_a = stock.simple_1d() - with self.assertRaises(TypeError): + with pytest.raises(TypeError): promote_aux_coord_to_dim_coord(cube_a, 0.0) def test_trying_to_promote_a_multidim_coord(self): cube_a = stock.simple_2d_w_multidim_coords() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): promote_aux_coord_to_dim_coord(cube_a, "bar") def test_trying_to_promote_a_scalar_coord(self): cube_a = stock.simple_2d_w_multidim_and_scalars() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): promote_aux_coord_to_dim_coord(cube_a, "an_other") def test_trying_to_promote_a_nonmonotonic_coord(self): cube_a = stock.hybrid_height() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): promote_aux_coord_to_dim_coord(cube_a, "surface_altitude") diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py index 562447aaf7..77d6df0251 100644 --- a/lib/iris/tests/unit/util/test_reverse.py +++ b/lib/iris/tests/unit/util/test_reverse.py @@ -4,71 +4,68 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.reverse`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import unittest - import numpy as np +import pytest import iris +from iris.tests import _shared_utils from iris.util import reverse -class Test_array(tests.IrisTest): +class Test_array: def test_simple_array(self): a = np.arange(12).reshape(3, 4) - self.assertArrayEqual(a[::-1], reverse(a, 0)) - self.assertArrayEqual(a[::-1, ::-1], reverse(a, [0, 1])) - self.assertArrayEqual(a[:, ::-1], reverse(a, 1)) - self.assertArrayEqual(a[:, ::-1], reverse(a, [1])) + _shared_utils.assert_array_equal(a[::-1], reverse(a, 0)) + _shared_utils.assert_array_equal(a[::-1, ::-1], reverse(a, [0, 1])) + _shared_utils.assert_array_equal(a[:, ::-1], reverse(a, 1)) + _shared_utils.assert_array_equal(a[:, ::-1], reverse(a, [1])) msg = "Reverse was expecting a single axis or a 1d array *" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, []) msg = "An axis value out of range for the number of dimensions *" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, -1) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, 10) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, [-1]) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, [0, -1]) msg = "To reverse an array, provide an int *" - with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): reverse(a, "latitude") def test_single_array(self): a = np.arange(36).reshape(3, 4, 3) - self.assertArrayEqual(a[::-1], reverse(a, 0)) - self.assertArrayEqual(a[::-1, ::-1], reverse(a, [0, 1])) - self.assertArrayEqual(a[:, ::-1, ::-1], reverse(a, [1, 2])) - self.assertArrayEqual(a[..., ::-1], reverse(a, 2)) + _shared_utils.assert_array_equal(a[::-1], reverse(a, 0)) + _shared_utils.assert_array_equal(a[::-1, ::-1], reverse(a, [0, 1])) + _shared_utils.assert_array_equal(a[:, ::-1, ::-1], reverse(a, [1, 2])) + _shared_utils.assert_array_equal(a[..., ::-1], reverse(a, 2)) msg = "Reverse was expecting a single axis or a 1d array *" - with 
self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, []) msg = "An axis value out of range for the number of dimensions *" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, -1) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, 10) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, [-1]) - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(a, [0, -1]) - with self.assertRaisesRegex(TypeError, "To reverse an array, provide an int *"): + with pytest.raises(TypeError, match="To reverse an array, provide an int *"): reverse(a, "latitude") -class Test_cube(tests.IrisTest): - def setUp(self): +class Test_cube: + @pytest.fixture(autouse=True) + def _setup(self): # On this cube pair, the coordinates to perform operations on have # matching long names but the points array on one cube is reversed # with respect to that on the other. @@ -95,81 +92,99 @@ def setUp(self): self.cube2 = iris.cube.Cube(data, dim_coords_and_dims=[(a2, 0), (b2, 1)]) def check_coorda_reversed(self, result): - self.assertArrayEqual(self.cube2.coord("a").points, result.coord("a").points) - self.assertArrayEqual(self.cube2.coord("a").bounds, result.coord("a").bounds) + _shared_utils.assert_array_equal( + self.cube2.coord("a").points, result.coord("a").points + ) + _shared_utils.assert_array_equal( + self.cube2.coord("a").bounds, result.coord("a").bounds + ) def check_coorda_unchanged(self, result): - self.assertArrayEqual(self.cube1.coord("a").points, result.coord("a").points) - self.assertArrayEqual(self.cube1.coord("a").bounds, result.coord("a").bounds) + _shared_utils.assert_array_equal( + self.cube1.coord("a").points, result.coord("a").points + ) + _shared_utils.assert_array_equal( + self.cube1.coord("a").bounds, result.coord("a").bounds + ) def check_coordb_reversed(self, result): - self.assertArrayEqual(self.cube2.coord("b").points, result.coord("b").points) + _shared_utils.assert_array_equal( + self.cube2.coord("b").points, result.coord("b").points + ) def check_coordb_unchanged(self, result): - self.assertArrayEqual(self.cube1.coord("b").points, result.coord("b").points) + _shared_utils.assert_array_equal( + self.cube1.coord("b").points, result.coord("b").points + ) def test_cube_dim0(self): cube1_reverse0 = reverse(self.cube1, 0) - self.assertArrayEqual(self.cube1.data[::-1], cube1_reverse0.data) + _shared_utils.assert_array_equal(self.cube1.data[::-1], cube1_reverse0.data) self.check_coorda_reversed(cube1_reverse0) self.check_coordb_unchanged(cube1_reverse0) def test_cube_dim1(self): cube1_reverse1 = reverse(self.cube1, 1) - self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) + _shared_utils.assert_array_equal(self.cube1.data[:, ::-1], cube1_reverse1.data) self.check_coordb_reversed(cube1_reverse1) self.check_coorda_unchanged(cube1_reverse1) def test_cube_dim_both(self): cube1_reverse_both = reverse(self.cube1, (0, 1)) - self.assertArrayEqual(self.cube1.data[::-1, ::-1], cube1_reverse_both.data) + _shared_utils.assert_array_equal( + self.cube1.data[::-1, ::-1], cube1_reverse_both.data + ) self.check_coorda_reversed(cube1_reverse_both) self.check_coordb_reversed(cube1_reverse_both) def test_cube_coord0(self): cube1_reverse0 = reverse(self.cube1, self.a1) - self.assertArrayEqual(self.cube1.data[::-1], cube1_reverse0.data) + 
_shared_utils.assert_array_equal(self.cube1.data[::-1], cube1_reverse0.data) self.check_coorda_reversed(cube1_reverse0) self.check_coordb_unchanged(cube1_reverse0) def test_cube_coord1(self): cube1_reverse1 = reverse(self.cube1, "b") - self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) + _shared_utils.assert_array_equal(self.cube1.data[:, ::-1], cube1_reverse1.data) self.check_coordb_reversed(cube1_reverse1) self.check_coorda_unchanged(cube1_reverse1) def test_cube_coord_both(self): cube1_reverse_both = reverse(self.cube1, (self.a1, self.b1)) - self.assertArrayEqual(self.cube1.data[::-1, ::-1], cube1_reverse_both.data) + _shared_utils.assert_array_equal( + self.cube1.data[::-1, ::-1], cube1_reverse_both.data + ) self.check_coorda_reversed(cube1_reverse_both) self.check_coordb_reversed(cube1_reverse_both) def test_cube_coord_spanning(self): cube1_reverse_spanning = reverse(self.cube1, "spanning") - self.assertArrayEqual(self.cube1.data[::-1, ::-1], cube1_reverse_spanning.data) + _shared_utils.assert_array_equal( + self.cube1.data[::-1, ::-1], cube1_reverse_spanning.data + ) self.check_coorda_reversed(cube1_reverse_spanning) self.check_coordb_reversed(cube1_reverse_spanning) - self.assertArrayEqual( + _shared_utils.assert_array_equal( self.span.points[::-1, ::-1], cube1_reverse_spanning.coord("spanning").points, ) def test_wrong_coord_name(self): msg = "Expected to find exactly 1 'latitude' coordinate, but found none." - with self.assertRaisesRegex(iris.exceptions.CoordinateNotFoundError, msg): + with pytest.raises(iris.exceptions.CoordinateNotFoundError, match=msg): reverse(self.cube1, "latitude") def test_empty_list(self): msg = "Reverse was expecting a single axis or a 1d array *" - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): reverse(self.cube1, []) def test_wrong_type_cube(self): @@ -177,14 +192,10 @@ def test_wrong_type_cube(self): "coords_or_dims must be int, str, coordinate or sequence of " "these. Got cube." ) - with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): reverse(self.cube1, self.cube1) def test_wrong_type_float(self): msg = "coords_or_dims must be int, str, coordinate or sequence of these." - with self.assertRaisesRegex(TypeError, msg): + with pytest.raises(TypeError, match=msg): reverse(self.cube1, 3.0) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py index d70b398ed5..c2e5bdbb6c 100644 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ b/lib/iris/tests/unit/util/test_rolling_window.py @@ -4,24 +4,22 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.rolling_window`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import dask.array as da import numpy as np import numpy.ma as ma +import pytest +from iris.tests import _shared_utils from iris.util import rolling_window -class Test_rolling_window(tests.IrisTest): +class Test_rolling_window: def test_1d(self): # 1-d array input a = np.array([0, 1, 2, 3, 4], dtype=np.int32) expected_result = np.array([[0, 1], [1, 2], [2, 3], [3, 4]], dtype=np.int32) result = rolling_window(a, window=2) - self.assertArrayEqual(result, expected_result) + _shared_utils.assert_array_equal(result, expected_result) def test_2d(self): # 2-d array input @@ -34,13 +32,13 @@ def test_2d(self): dtype=np.int32, ) result = rolling_window(a, window=3, axis=1) - self.assertArrayEqual(result, expected_result) + _shared_utils.assert_array_equal(result, expected_result) def test_3d_lazy(self): a = da.arange(2 * 3 * 4).reshape((2, 3, 4)) expected_result = np.arange(2 * 3 * 4).reshape((1, 2, 3, 4)) result = rolling_window(a, window=2, axis=0).compute() - self.assertArrayEqual(result, expected_result) + _shared_utils.assert_array_equal(result, expected_result) def test_1d_masked(self): # 1-d masked array input @@ -51,7 +49,7 @@ def test_1d_masked(self): dtype=np.int32, ) result = rolling_window(a, window=2) - self.assertMaskedArrayEqual(result, expected_result) + _shared_utils.assert_masked_array_equal(result, expected_result) def test_2d_masked(self): # 2-d masked array input @@ -72,7 +70,7 @@ def test_2d_masked(self): dtype=np.int32, ) result = rolling_window(a, window=3, axis=1) - self.assertMaskedArrayEqual(result, expected_result) + _shared_utils.assert_masked_array_equal(result, expected_result) def test_degenerate_mask(self): a = ma.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int32) @@ -88,7 +86,7 @@ def test_degenerate_mask(self): dtype=np.int32, ) result = rolling_window(a, window=3, axis=1) - self.assertMaskedArrayEqual(result, expected_result) + _shared_utils.assert_masked_array_equal(result, expected_result) def test_step(self): # step should control how far apart consecutive windows are @@ -97,27 +95,23 @@ def test_step(self): [[[0, 1, 2], [2, 3, 4]], [[5, 6, 7], [7, 8, 9]]], dtype=np.int32 ) result = rolling_window(a, window=3, step=2, axis=1) - self.assertArrayEqual(result, expected_result) + _shared_utils.assert_array_equal(result, expected_result) def test_window_too_short(self): # raise an error if the window length is less than 1 a = np.empty([5]) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): rolling_window(a, window=0) def test_window_too_long(self): # raise an error if the window length is longer than the # corresponding array dimension a = np.empty([7, 5]) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): rolling_window(a, window=6, axis=1) def test_invalid_step(self): # raise an error if the step between windows is less than 1 a = np.empty([5]) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): rolling_window(a, step=0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_squeeze.py b/lib/iris/tests/unit/util/test_squeeze.py index cb4b55c1e6..c1a63a64b6 100644 --- a/lib/iris/tests/unit/util/test_squeeze.py +++ b/lib/iris/tests/unit/util/test_squeeze.py @@ -4,47 +4,40 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.squeeze`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import unittest +import pytest import iris import iris.tests.stock as stock -class Test(tests.IrisTest): - def setUp(self): +class Test: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.simple_2d_w_multidim_and_scalars() def test_no_change(self): - self.assertEqual(self.cube, iris.util.squeeze(self.cube)) + assert self.cube == iris.util.squeeze(self.cube) def test_squeeze_one_dim(self): cube_3d = iris.util.new_axis(self.cube, scalar_coord="an_other") cube_2d = iris.util.squeeze(cube_3d) - self.assertEqual(self.cube, cube_2d) + assert self.cube == cube_2d def test_squeeze_two_dims(self): cube_3d = iris.util.new_axis(self.cube, scalar_coord="an_other") cube_4d = iris.util.new_axis(cube_3d, scalar_coord="air_temperature") - self.assertEqual(self.cube, iris.util.squeeze(cube_4d)) + assert self.cube == iris.util.squeeze(cube_4d) def test_squeeze_one_anonymous_dim(self): cube_3d = iris.util.new_axis(self.cube) cube_2d = iris.util.squeeze(cube_3d) - self.assertEqual(self.cube, cube_2d) + assert self.cube == cube_2d def test_squeeze_to_scalar_cube(self): cube_scalar = self.cube[0, 0] cube_1d = iris.util.new_axis(cube_scalar) - self.assertEqual(cube_scalar, iris.util.squeeze(cube_1d)) - - -if __name__ == "__main__": - unittest.main() + assert cube_scalar == iris.util.squeeze(cube_1d) diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py index c70bb25a0f..ceccc459ce 100644 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ b/lib/iris/tests/unit/util/test_unify_time_units.py @@ -4,21 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.array_equal`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import copy import cf_units import numpy as np import iris +from iris.tests import _shared_utils import iris.tests.stock as stock from iris.util import unify_time_units -class Test(tests.IrisTest): +class Test: def simple_1d_time_cubes(self, calendar="standard"): coord_points = [1, 2, 3, 4, 5] data_points = [273, 275, 278, 277, 274] @@ -51,7 +48,7 @@ def _common(self, expected, result, coord_name="time"): except iris.exceptions.CoordinateNotFoundError: pass else: - self.assertEqual(expected, epoch) + assert expected == epoch def test_cubelist_with_time_coords(self): # Tests an :class:`iris.cube.CubeList` containing cubes with time @@ -69,16 +66,16 @@ def test_list_of_cubes_with_time_coords(self): unify_time_units(list_of_cubes) self._common(expected, list_of_cubes) - @tests.skip_data + @_shared_utils.skip_data def test_no_time_coord_in_cubes(self): - path0 = tests.get_data_path(("PP", "aPPglob1", "global.pp")) - path1 = tests.get_data_path(("PP", "aPPglob1", "global_t_forecast.pp")) + path0 = _shared_utils.get_data_path(("PP", "aPPglob1", "global.pp")) + path1 = _shared_utils.get_data_path(("PP", "aPPglob1", "global_t_forecast.pp")) cube0 = iris.load_cube(path0) cube1 = iris.load_cube(path1) cubes = iris.cube.CubeList([cube0, cube1]) result = copy.copy(cubes) unify_time_units(result) - self.assertEqual(cubes, result) + assert cubes == result def test_time_coord_only_in_some_cubes(self): list_of_cubes = self.simple_1d_time_cubes() @@ -136,7 +133,3 @@ def test_units_dtype_int_float(self): cubelist = iris.cube.CubeList([cube0, cube1]) unify_time_units(cubelist) assert len(cubelist.concatenate()) == 1 - - -if __name__ == "__main__": - tests.main() diff --git a/pyproject.toml b/pyproject.toml index b849dae9e6..74e514ad20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -134,6 +134,7 @@ version_scheme = "release-branch-semver" [tool.pytest.ini_options] addopts = "-ra --durations=25" +required_plugins = "pytest-mock" testpaths = "lib/iris" [tool.coverage.run] diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index a67480e8a2..97972bfa1f 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 372c3b278b46d5c658024f7b6b47d7b92266bb7ca5a25b0eb4f67e055b8a02a7 +# input_hash: 3a1bed2476064df92c4edecb4c0b462e6b4ecaa37082bd1ee61a2502ff4671a1 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 @@ -57,7 +57,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#60 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_0.conda#540296f0ce9d3352188c15a89b30b9ac +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -117,17 +117,17 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.co https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda#80a57756c545ad11f9847835aa21e6b2 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.15-h4a871b0_2_cpython.conda#98059097f62e97be9aed7ec904055825 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_0.conda#c4cb444844615e1cd4c9d989f770bcc5 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 
https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f @@ -177,7 +177,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda#204892bce2e44252b5cf272712f10bdd https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.2-ha7bfdaf_0.conda#128e74a4f8f4fef4dc5130a8bbccc15d +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.3-ha7bfdaf_0.conda#8bd654307c455162668cd66e36494000 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py310hff52083_2.conda#4e8b2a2851668c8ad4d5360845281be9 @@ -200,7 +200,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py310ha75aee https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py310ha75aee5_1.conda#0d4c5c76ae5f5aac6f0be419963a19dd https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_0.conda#9e57330f431abbb4c88a5f898a4ba223 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda#d5cd48392c67fb6849ba459c2c2b671f +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -239,21 +239,21 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_101.c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.2-default_hb5137d0_1.conda#7e574c7499bc41f92537634a23fed79a -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.2-default_h9c6a7e4_1.conda#cb5c5ff12b37aded00d9aaa7b9a86a78 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.3-default_hb5137d0_0.conda#311e6a1d041db3d6a8a8437750d4234f +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.3-default_h9c6a7e4_0.conda#b8a8cd77810b20754f358f2327812552 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 
https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py310hfeaa1f3_0.conda#1947280342c7259b82a707e38ebc212e -https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/noarch/async-timeout-4.0.3-pyhd8ed1ab_0.conda#3ce482ec3066e6d809dbbb1d1679f215 @@ -268,7 +268,8 @@ https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.2-py310hd6e36ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_0.conda#5a166b998fd17cdaaaadaccdd71a363f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py310h2e9f774_0.conda#42a3ea3c283d930ae6d156b97ffe4740 -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda#c54c0107057d67ddf077751339ec2c63 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_0.conda#ba9f7f0ec4f2a18de3e7bce67c4a431e https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.13.0-h94b29a5_0.conda#4431bd4ace17dd09b97caf68509b016b @@ -308,7 +309,7 @@ https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py310ha75aee5_0.cond https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py310h89163eb_0.conda#cdc075f4328556adf4dde97b4f4a0532 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310hf462985_6.conda#b8ad2d561f4e0db4f09d06cc0e73e0b0 https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b 
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.6.1-nompi_h6063b07_4.conda#3108bfa76cd8a3ebc5546797106946e5 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_0.conda#d5ee837e9e21dabb505a010c6a196fa6 https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_h8657690_705.conda#bba34ade586dc53222d5e0387f7733c2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a @@ -319,7 +320,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310hf462 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py310h5eaa309_0.conda#ca4d935c1715f95b6e86846ad1675a2b https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.6.1-pyhc1e730c_0.conda#25a9661177fd68bfdb4314fd658e5c3b +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_0.conda#74674b93806167c26da4eca7613bc225 @@ -338,3 +339,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e + diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 60a47736e4..1304a69343 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 58de0176aff465b8a68544e32553b8c5648f581ece5d2c4df0d333dd456ea851 +# input_hash: 35b01abef89f7af6fc6928cd3e505497158c7209f4f0efed8da35047fdabdc86 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 @@ -57,7 +57,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#60 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_0.conda#540296f0ce9d3352188c15a89b30b9ac +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -117,17 +117,17 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.co https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda#80a57756c545ad11f9847835aa21e6b2 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.10-hc5c86c4_3_cpython.conda#9e1ad55c87368e662177661a998feed5 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_0.conda#c4cb444844615e1cd4c9d989f770bcc5 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 
https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f @@ -177,7 +177,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda#204892bce2e44252b5cf272712f10bdd https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.2-ha7bfdaf_0.conda#128e74a4f8f4fef4dc5130a8bbccc15d +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.3-ha7bfdaf_0.conda#8bd654307c455162668cd66e36494000 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py311h38be061_2.conda#733b481d20ff260a34f2b0003ff4fbb3 @@ -201,7 +201,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py311h9ecbd0 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py311h9ecbd09_1.conda#abeb54d40f439b86f75ea57045ab8496 https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_0.conda#9e57330f431abbb4c88a5f898a4ba223 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda#d5cd48392c67fb6849ba459c2c2b671f +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -240,21 +240,21 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_101.c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.2-default_hb5137d0_1.conda#7e574c7499bc41f92537634a23fed79a -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.2-default_h9c6a7e4_1.conda#cb5c5ff12b37aded00d9aaa7b9a86a78 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.3-default_hb5137d0_0.conda#311e6a1d041db3d6a8a8437750d4234f +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.3-default_h9c6a7e4_0.conda#b8a8cd77810b20754f358f2327812552 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 
https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py311h49e9ac3_0.conda#2bd3d0f839ec0d1eaca817c9d1feb7c2 -https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py311h9ecbd09_0.conda#d9c23163e7ac5f8926372c7d792a996f https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py311h2dc5d0c_0.conda#4f0fa0019a6e7be77db3609a707a4581 @@ -269,7 +269,8 @@ https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.2-py311h71ddf71_0.cond https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_0.conda#5a166b998fd17cdaaaadaccdd71a363f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py311h0f98d5a_0.conda#22531205a97c116251713008d65dfefd -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda#c54c0107057d67ddf077751339ec2c63 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_0.conda#ba9f7f0ec4f2a18de3e7bce67c4a431e https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.13.0-h94b29a5_0.conda#4431bd4ace17dd09b97caf68509b016b @@ -308,7 +309,7 @@ https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_0.conda#74674b93806167c26da4eca7613bc225 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h9f3472d_6.conda#ac7dc7f70f8d2c1d96ecb7e4cb196498 https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b 
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.6.1-nompi_h6063b07_4.conda#3108bfa76cd8a3ebc5546797106946e5 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_0.conda#d5ee837e9e21dabb505a010c6a196fa6 https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_h8657690_705.conda#bba34ade586dc53222d5e0387f7733c2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a @@ -320,7 +321,7 @@ https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py311h7158b74_209.conda#011801a68c022cf9692a4567d84678ca https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py311h7db5c69_0.conda#20ba399d57a2b5de789a5b24341481a1 https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.6.1-pyhc1e730c_0.conda#25a9661177fd68bfdb4314fd658e5c3b +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py311hc8241c7_209.conda#13fdaae5c7c5c76089ca76f63b287ef5 @@ -337,3 +338,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e + diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 99dc274e80..cbec79a901 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: c193458a42ce9c0214cd77bd4813343270edb438eceaf46d40cf7ea29a433b56 +# input_hash: 8cb273c57f190b95e7db1b3aae01b38ca08c48334bd8a71035d82f412ddd84bc @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 @@ -57,7 +57,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#60 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_0.conda#540296f0ce9d3352188c15a89b30b9ac +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -117,17 +117,17 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.co https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda#80a57756c545ad11f9847835aa21e6b2 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_0.conda#c4cb444844615e1cd4c9d989f770bcc5 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 
https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f @@ -177,7 +177,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda#204892bce2e44252b5cf272712f10bdd https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.2-ha7bfdaf_0.conda#128e74a4f8f4fef4dc5130a8bbccc15d +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.3-ha7bfdaf_0.conda#8bd654307c455162668cd66e36494000 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py312h7900ff3_2.conda#fddd3092f921be8e01b18f2a0266d98f @@ -201,7 +201,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_0.conda#9e57330f431abbb4c88a5f898a4ba223 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda#d5cd48392c67fb6849ba459c2c2b671f +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -240,21 +240,21 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_101.c https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.2-default_hb5137d0_1.conda#7e574c7499bc41f92537634a23fed79a -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.2-default_h9c6a7e4_1.conda#cb5c5ff12b37aded00d9aaa7b9a86a78 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.3-default_hb5137d0_0.conda#311e6a1d041db3d6a8a8437750d4234f +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.3-default_h9c6a7e4_0.conda#b8a8cd77810b20754f358f2327812552 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 
https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf -https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 @@ -269,7 +269,8 @@ https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.2-py312h58c1407_0.cond https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_0.conda#5a166b998fd17cdaaaadaccdd71a363f https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda#c54c0107057d67ddf077751339ec2c63 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_0.conda#ba9f7f0ec4f2a18de3e7bce67c4a431e https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.13.0-h94b29a5_0.conda#4431bd4ace17dd09b97caf68509b016b @@ -308,7 +309,7 @@ https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6 https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_0.conda#74674b93806167c26da4eca7613bc225 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py312hc0a28a1_6.conda#fa4853d25b6fbfef5eb7b3e1b5616dd5 https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b 
-https://conda.anaconda.org/conda-forge/linux-64/esmf-8.6.1-nompi_h6063b07_4.conda#3108bfa76cd8a3ebc5546797106946e5 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_0.conda#d5ee837e9e21dabb505a010c6a196fa6 https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_h8657690_705.conda#bba34ade586dc53222d5e0387f7733c2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a @@ -320,7 +321,7 @@ https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py312hc73667e_209.conda#e2967eddf4ea06a8b645da9967f370be https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.6.1-pyhc1e730c_0.conda#25a9661177fd68bfdb4314fd658e5c3b +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py312hc8241c7_209.conda#1354402d09a8614821d6d3c13d826863 @@ -337,3 +338,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e + diff --git a/requirements/py310.yml b/requirements/py310.yml index f7285938f6..d81d4c0d42 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -44,6 +44,7 @@ dependencies: - psutil - pytest - pytest-cov + - pytest-mock - pytest-xdist - requests diff --git a/requirements/py311.yml b/requirements/py311.yml index e6f5e62a2b..b12c46c87f 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -44,6 +44,7 @@ dependencies: - psutil - pytest - pytest-cov + - pytest-mock - pytest-xdist - requests diff --git a/requirements/py312.yml b/requirements/py312.yml index b16f25b501..74277e417f 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ -44,6 +44,7 @@ dependencies: - psutil - pytest - pytest-cov + - pytest-mock - pytest-xdist - requests From e743a6795a46a9d5c6e808ead7561f4092845fb2 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:14:51 +0000 Subject: [PATCH 14/74] Use the new re-entrant do-nothing script (#6062) * Use the new re-entrant do-nothing script. * Proper import error handling. * Always use git add. * More automation around CF standard names. * Missing bracket. * Less nebulous language. * Encourage copy-pasting from previous releases. * Clearer wording about release branches. * Clearer RTD versioning. * More precise installation tests. * Clearer rc branch naming. * Draw attention to deviation from conda-forge guidance. 
* Wait for CI to finish. * Include conda search command. * Comment on discussion. * Viva Engage. * Use a git command for recreating files. * Better SHA256 validation. * Add missing conda deactivate command. * Correct URL for watching conda-forge process. * Correct use of self.git_tag. * More guidance about when to change RC feedstock files. * Better advice for waiting for conda testing. * Raise correct message if `nothing` is not installed. Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> * Line breaks in CF standard names instructions. Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> * Don't use override-channels * Shout louder about NOT targeting the main branch. * More specificity in creating PRs, extra check for conda-forge RCs. * Corrected sha256 check. * What's New entry. --------- Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --- .gitignore | 3 + docs/src/whatsnew/latest.rst | 4 + tools/release_do_nothing.py | 1525 ++++++++++++++++++---------------- 3 files changed, 807 insertions(+), 725 deletions(-) diff --git a/.gitignore b/.gitignore index 1b132cbd38..bff163db45 100644 --- a/.gitignore +++ b/.gitignore @@ -79,3 +79,6 @@ docs/iris_image_test_output/ # Files generated during test runs. lib/iris/tests/results/**/*.dot + +# Uses of SciTools-incubator/nothing. +**/.nothing diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7798e46481..7325630f22 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -84,6 +84,10 @@ This document explains the changes made to Iris for this release part of an ongoing effort to move from ``unittest`` to ``pytest``. (:pull:`6207`, part of :issue:`6212`) +#. `@trexfeathers`_, `@ESadek-MO`_ and `@HGWright`_ heavily re-worked + :doc:`/developers_guide/release_do_nothing` to be more thorough and apply + lessons learned from recent releases. (:pull:`6062`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index 79f26f3409..624e3372b1 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -10,879 +10,954 @@ """ from datetime import datetime -from enum import Enum +from enum import IntEnum from pathlib import Path import re -from sys import stderr -from time import sleep import typing +try: + from nothing import Progress +except ImportError: + install_message = ( + "This script requires the `nothing` package to be installed:\n" + "pip install git+https://github.com/SciTools-incubator/nothing.git" + ) + raise ImportError(install_message) + + +class IrisRelease(Progress): + class ReleaseTypes(IntEnum): + MAJOR = 0 + MINOR = 1 + PATCH = 2 + + github_user: str = None + release_type: ReleaseTypes = None + git_tag: str = None # v1.2.3rc0 + first_in_series: bool = None + sha256: str = None + + @classmethod + def get_cmd_description(cls) -> str: + return "Do-nothing workflow for the Iris release process." 
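# The methods in this class lean on the ``nothing.Progress`` base class. A
# minimal stand-in is sketched below, inferred purely from how this script
# uses it (``print``, ``get_input``, ``wait_for_done``, ``report_problem``,
# ``set_value_from_input`` and the ``get_cmd_description`` / ``get_steps``
# hooks). The real package (SciTools-incubator/nothing) may well differ; in
# particular it appears to persist progress to a ``.nothing`` file (hence the
# new ``.gitignore`` entry) so a run can be resumed. Treat this only as an
# illustration for local experimentation; ``ProgressSketch`` is a
# hypothetical name and is not part of this patch.
from sys import stderr


class ProgressSketch:
    """Bare-bones stand-in for the assumed ``nothing.Progress`` interface."""

    @classmethod
    def get_cmd_description(cls) -> str:
        raise NotImplementedError

    @classmethod
    def get_steps(cls) -> list:
        raise NotImplementedError

    @staticmethod
    def print(message: str) -> None:
        # Named ``print`` to match the calls in this script; the builtin is
        # still what gets used inside the method body.
        print(f"\n{message}")

    @staticmethod
    def get_input(message: str, expected_inputs: str) -> str:
        print(f"\n{message}")
        return input(f"{expected_inputs} : ")

    @staticmethod
    def report_problem(message: str) -> None:
        print(message, file=stderr)

    @classmethod
    def wait_for_done(cls, message: str) -> None:
        # Block until the user confirms the manual step is complete.
        while cls.get_input(message, "Step complete? y / [n]").casefold() != "y":
            pass

    def set_value_from_input(self, key, message, expected_inputs, post_process):
        # Re-prompt until ``post_process`` accepts the raw input, then store
        # the result on the instance under ``key``.
        value = None
        while value is None:
            value = post_process(self.get_input(message, expected_inputs))
        setattr(self, key, value)

    @classmethod
    def run(cls) -> None:
        # Simplest possible driver: execute every step in order.
        instance = cls()
        instance.print(cls.get_cmd_description())
        for step in cls.get_steps():
            step(instance)


# With such a base class, ``IrisRelease.run()`` (or the real package's
# equivalent entry point) would simply walk the callables returned by
# ``get_steps`` in order, prompting the user at each manual step.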
+ + @classmethod + def get_steps(cls) -> list[typing.Callable[..., None]]: + return [ + cls.get_github_user, + cls.get_release_type, + cls.get_release_tag, + cls.check_release_candidate, + cls.check_first_in_series, + cls.update_standard_names, + cls.check_deprecations, + cls.create_release_branch, + cls.finalise_whats_new, + cls.cut_release, + cls.check_rtd, + cls.check_pypi, + cls.update_conda_forge, + cls.update_links, + cls.twitter_announce, + cls.merge_back, + ] + + def get_github_user(self): + def validate(input_user: str) -> str | None: + if not re.fullmatch(r"[a-zA-Z0-9-]+", input_user): + self.report_problem("Invalid GitHub username. Please try again ...") + else: + return input_user + + message = ( + "Please input your GitHub username.\n" + "This is used in the URLs for creating pull requests." + ) + self.set_value_from_input( + key="github_user", + message=message, + expected_inputs="Username", + post_process=validate, + ) + self.print(f"GitHub username = {self.github_user}") + + def get_release_type(self): + def validate(input_value: str) -> IrisRelease.ReleaseTypes | None: + try: + return self.ReleaseTypes(int(input_value)) + except ValueError: + self.report_problem("Invalid release type. Please try again ...") + + self.set_value_from_input( + key="release_type", + message="What type of release are you preparing?\nhttps://semver.org/", + expected_inputs=f"Choose a number {tuple(self.ReleaseTypes)}", + post_process=validate, + ) + self.print(f"{repr(self.release_type)} confirmed.") + + def get_release_tag(self): + # TODO: automate using setuptools_scm. + + def validate(input_tag: str) -> str | None: + # TODO: use the packaging library? + version_mask = r"v\d+\.\d+\.\d+\D*.*" + regex_101 = "https://regex101.com/r/dLVaNH/1" + if re.fullmatch(version_mask, input_tag) is None: + problem_message = ( + "Release tag does not match the input mask:\n" + f"{version_mask}\n" + f"({regex_101})" + ) + self.report_problem(problem_message) + else: + return input_tag # v1.2.3rc0 -class ReleaseTypes(Enum): - MAJOR = 0 - MINOR = 1 - PATCH = 2 + message = ( + "Input the release tag you are creating today, including any " + "release " + "candidate suffix.\n" + "https://semver.org/\n" + "https://scitools-iris.readthedocs.io/en/latest/developers_guide" + "/release.html?highlight=candidate#release-candidate" + ) + self.set_value_from_input( + key="git_tag", + message=message, + expected_inputs="e.g. v1.2.3rc0", + post_process=validate, + ) + class Strings(typing.NamedTuple): + series: str + branch: str + release: str -valid_release_types = typing.Literal["major", "minor", "patch"] + @property + def strings(self) -> Strings: + series = ".".join(self.git_tag.split(".")[:2]) # v1.2 + return self.Strings( + series=series, + branch=series + ".x", # v1.2.x + release=self.git_tag[1:], # 1.2.3rc0 + ) + @property + def is_release_candidate(self) -> bool: + return "rc" in self.git_tag -class ReleaseStrings: - """An easy way to pass the various flavours of release string between functions.""" + def check_release_candidate(self): + message = "Checking tag for release candidate: " + if self.is_release_candidate: + message += "DETECTED\nThis IS a release candidate." + else: + message += "NOT DETECTED\nThis IS NOT a release candidate." 
+ self.print(message) - def __init__(self, input_tag: str): - version_mask = r"v\d+\.\d+\.\d+\D*.*" - regex_101 = "https://regex101.com/r/dLVaNH/1" - if re.fullmatch(version_mask, input_tag) is None: + if self.release_type == self.ReleaseTypes.PATCH and self.is_release_candidate: message = ( - "Release tag does not match the input mask:\n" - f"{version_mask}\n" - f"({regex_101})" + "Release candidates are not expected for PATCH releases. " + "Are you sure you want to continue?" ) - raise ValueError(message) - else: - self.tag = input_tag # v1.2.3rc0 + if self.get_input(message, "y / [n]").casefold() != "y".casefold(): + exit() - self.series = ".".join(self.tag.split(".")[:2]) # v1.2 - self.branch = self.series + ".x" # v1.2.x - self.release = self.tag[1:] # 1.2.3rc0 + def check_first_in_series(self): + if self.release_type != self.ReleaseTypes.PATCH: + message = ( + f"Is this the first release in the {self.strings.series} " + f"series, including any release candidates?" + ) + self.set_value_from_input( + key="first_in_series", + message=message, + expected_inputs="y / n", + post_process=lambda x: x.casefold() == "y".casefold(), + ) + if self.first_in_series: + self.print("First in series confirmed.") + if not self.is_release_candidate: + message = ( + "The first release in a series is expected to be a " + "release candidate, but this is not. Are you sure you " + "want to continue?" + ) + if self.get_input(message, "y / [n]").casefold() != "y".casefold(): + exit() + else: + self.print("Existing series confirmed.") + + def _create_pr( + self, + base_org: str, + base_repo: str, + base_branch: str, + head_branch: str + ) -> None: + """Instruct user to create a PR with a specified base and head. + + Parameters + ---------- + base_org : str + The name of the GitHub organisation that owns the `base_repo` that + owns the `base_branch`. + base_repo : str + The name of the GitHub repository (within the `base_org`) that owns + the `base_branch`. + base_branch : str + The name of the branch (within the `base_repo`) that will be the + base of the PR. + head_branch : str + The name of the branch (within the user's fork of `base_repo`) that + will be the head of the PR. 
+ """ + repo_url = f"https://github.com/{base_org}/{base_repo}" + diff_url = f"{base_branch}...{self.github_user}:{base_repo}:{head_branch}" + full_url = f"{repo_url}/compare/{diff_url}" + + pr_message = ( + "Create a Pull Request for your changes by visiting this URL " + "and clicking `Create pull request`:\n" + f"{full_url}" + ) + self.wait_for_done(pr_message) + + def update_standard_names(self): + if self.first_in_series: + working_branch = self.strings.branch + ".standard_names" + self._delete_local_branch(working_branch) + message = ( + "Checkout a local branch from the official ``main`` branch.\n" + "git fetch upstream;\n" + f"git checkout upstream/main -b {working_branch};" + ) + self.wait_for_done(message) + url = "https://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml" + file = Path(__file__).parents[1] / "etc" / "cf-standard-name-table.xml" + message = ( + "Update the CF standard names table to the latest version:\n" + f'wget "{url}" -O {file};\n' + f"git add {file};\n" + "git commit -m 'Update CF standard names table.';\n" + f"git push -u origin {working_branch};" + ) + self.wait_for_done(message) -class WhatsNewRsts: - """An easy way to pass the paths of various What's New files between functions.""" + self._create_pr( + base_org="SciTools", + base_repo="iris", + base_branch="main", + head_branch=working_branch, + ) + message = "Work with the development team to get the PR merged." + self.wait_for_done(message) - def __init__(self, release_strings: ReleaseStrings): - src_dir = Path(__file__).parents[1] / "docs" / "src" - whatsnew_dir = src_dir / "whatsnew" - assert whatsnew_dir.is_dir() + def check_deprecations(self): + if self.release_type == self.ReleaseTypes.MAJOR: + message = ( + "This is a MAJOR release - be sure to finalise all deprecations " + "and FUTUREs from previous releases, via a new Pull Request.\n" + "https://scitools-iris.readthedocs.io/en/latest/developers_guide" + "/contributing_deprecations.html" + ) + self.wait_for_done(message) - self.latest = whatsnew_dir / "latest.rst" - self.release = whatsnew_dir / (release_strings.series[1:] + ".rst") - self.index = whatsnew_dir / "index.rst" - self.template = self.latest.with_suffix(".rst.template") + def create_release_branch(self): + # TODO: automate + print("Release branch management ...") + if self.first_in_series: + message = ( + "Visit https://github.com/SciTools/iris and create the" + f"``{self.strings.branch}`` release branch from ``main``." + ) + self.wait_for_done(message) -def _break_print(message: str): - print() - print(message) - # Help with flow/visibility by waiting 1secs before proceeding. - sleep(1) + else: + message = ( + "Cherry-pick any specific commits that are needed from ``main`` " + f"onto {self.strings.branch} , to get the CI passing.\n" + "E.g. a new dependency pin may have been introduced since " + f"{self.strings.branch} was last updated from ``main``.\n" + "DO NOT squash-merge - want to preserve the original commit " + "SHA's." 
+ ) + self.wait_for_done(message) + @staticmethod + def _delete_local_branch(branch_name: str): + message = ( + "Before the next step, avoid a name clash by deleting any " + "existing local branch, if one exists.\n" + f"git branch -D {branch_name};\n" + f"git push -d origin {branch_name};" + ) + IrisRelease.wait_for_done(message) -def _mark_section(section_number: int): - _break_print(f"SECTION {section_number} ...") + class WhatsNewRsts(typing.NamedTuple): + latest: Path + release: Path + index: Path + template: Path + @property + def whats_news(self) -> WhatsNewRsts: + src_dir = Path(__file__).parents[1] / "docs" / "src" + whatsnew_dir = src_dir / "whatsnew" + assert whatsnew_dir.is_dir() + latest = whatsnew_dir / "latest.rst" -def _get_input(message: str, expected_inputs: str) -> str: - _break_print(message) - return input(expected_inputs + " : ") + return self.WhatsNewRsts( + latest=latest, + release=whatsnew_dir / (self.strings.series[1:] + ".rst"), + index=whatsnew_dir / "index.rst", + template=latest.with_suffix(".rst.template"), + ) + def finalise_whats_new(self): + self.print("What's New finalisation ...") -def _wait_for_done(message: str): - _break_print(message) - done = False - while not done: - done = ( - input("Step complete? y / [n] : ").casefold() == "y".casefold() + working_branch = self.strings.branch + ".updates" + self._delete_local_branch(working_branch) + message = ( + f"Checkout a local branch from the official {self.strings.branch} " + f"branch.\n" + "git fetch upstream;\n" + f"git checkout upstream/{self.strings.branch} -b " + f"{working_branch};" ) + self.wait_for_done(message) + # TODO: automate + if self.first_in_series: + message = ( + "'Cut' the What's New for the release.\n" + f"git mv {self.whats_news.latest.absolute()} " + f"{self.whats_news.release.absolute()};" + ) + self.wait_for_done(message) -def _report_problem(message: str): - print(message, file=stderr) - # To ensure correct sequencing of messages. - sleep(0.5) + message = ( + f"In {self.whats_news.index.absolute()}:\n" + f"Replace references to {self.whats_news.latest.name} with " + f"{self.whats_news.release.name}" + ) + self.wait_for_done(message) + self.print(f"What's New file path = {self.whats_news.release}") -def get_release_type() -> ReleaseTypes: - release_type = None - release_types_str = " ".join( - [f"{m.name}={m.value}" for m in ReleaseTypes.__members__.values()] - ) - message = "What type of release are you preparing?\nhttps://semver.org/" - while release_type is None: - input_type = _get_input(message, release_types_str) - try: - release_type = ReleaseTypes(int(input_type)) - except ValueError: - _report_problem("Invalid release type. Please try again ...") - _break_print(f"{release_type} confirmed.") - return release_type - - -def get_release_tag() -> ReleaseStrings: - # TODO: automate using setuptools_scm. - release_strings = None - message = ( - "Input the release tag you are creating today, including any release " - "candidate suffix.\n" - "https://semver.org/\n" - "https://scitools-iris.readthedocs.io/en/latest/developers_guide/release.html?highlight=candidate#release-candidate" - ) - while release_strings is None: - input_tag = _get_input(message, "e.g. 
v1.2.3rc0") - try: - release_strings = ReleaseStrings(input_tag) - except ValueError as err: - _report_problem(str(err)) - return release_strings - - -def check_release_candidate( - release_type: ReleaseTypes, release_strings: ReleaseStrings -) -> bool: - is_release_candidate = "rc" in release_strings.tag - - message = "Checking tag for release candidate: " - if is_release_candidate: - message += "DETECTED\nThis IS a release candidate." - else: - message += "NOT DETECTED\nThis IS NOT a release candidate." - _break_print(message) - - if release_type == ReleaseTypes.PATCH and is_release_candidate: - message = ( - "Release candidates are not expected for PATCH releases. " - "Are you sure you want to continue?" - ) - if _get_input(message, "y / [n]").casefold() != "y".casefold(): - exit() - return is_release_candidate - - -def check_first_in_series( - release_type: ReleaseTypes, - release_strings: ReleaseStrings, - is_release_candidate: bool, -) -> bool: - first_in_series = False - if release_type != ReleaseTypes.PATCH: - message = ( - "Have there been any prior releases in the " - f"{release_strings.series} series, including release candidates?" - ) - first_in_series = ( - _get_input(message, "[y] / n").casefold() == "n".casefold() - ) - if first_in_series: - _break_print("First in series confirmed.") - if not is_release_candidate: - message = ( - "The first release in a series is expected to be a " - "release candidate, but this is not. Are you sure you " - "want to continue?" + if not self.release_type == self.ReleaseTypes.PATCH: + whatsnew_title = ( + f"{self.strings.series} ({datetime.today().strftime('%d %b %Y')}" + ) + if self.is_release_candidate: + whatsnew_title += " [release candidate]" + whatsnew_title += ")" + # TODO: automate + message = ( + f"In {self.whats_news.release.name}: set the page title to:\n" + f"{whatsnew_title}\n" + ) + if not self.is_release_candidate: + message += ( + "\nBe sure to remove any existing mentions of release " + "candidate from the title.\n" ) - if _get_input(message, "y / [n]").casefold() != "y".casefold(): - exit() - else: - _break_print("Existing series confirmed.") - return first_in_series - - -def update_standard_names(first_in_series: bool) -> None: - if first_in_series: - message = ( - "Update the file ``etc/cf-standard-name-table.xml`` to the latest CF " - "standard names, via a new Pull Request.\n" - "(This is used during build to automatically generate the sourcefile " - "``lib/iris/std_names.py``).\n" - "Latest standard names:\n" - 'wget "https://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml";' - ) - _wait_for_done(message) + self.wait_for_done(message) + message = ( + f"In {self.whats_news.release.name}: ensure the page title " + "underline is the exact same length as the page title text." 
+ ) + self.wait_for_done(message) -def check_deprecations(release_type: ReleaseTypes) -> None: - if release_type == ReleaseTypes.MAJOR: - message = ( - "This is a MAJOR release - be sure to finalise all deprecations " - "and FUTUREs from previous releases, via a new Pull Request.\n" - "https://scitools-iris.readthedocs.io/en/latest/developers_guide/contributing_deprecations.html" - ) - _wait_for_done(message) - + dropdown_title = f"\n{self.strings.series} Release Highlights\n" + message = ( + f"In {self.whats_news.release.name}: set the sphinx-design " + f"dropdown title to:{dropdown_title}" + ) + self.wait_for_done(message) -def _delete_local_branch(branch_name: str): - message = ( - "Before the next step, avoid a name clash by deleting any " - "existing local branch, if one exists.\n" - f"git branch -D {branch_name};\n" - f"git push -d origin {branch_name};" - ) - _wait_for_done(message) + message = ( + f"Review {self.whats_news.release.name} to ensure it is a good " + f"reflection of what is new in {self.strings.series}.\n" + "I.e. all significant work you are aware of should be " + "present, such as a major dependency pin, a big new feature, " + "a known performance change. You can not be expected to know " + "about every single small change." + ) + self.wait_for_done(message) + message = ( + "Work with the development team to populate the Release " + f"Highlights dropdown section at the top of " + f"{self.whats_news.release.name}." + ) + self.wait_for_done(message) -def create_release_branch( - release_strings: ReleaseStrings, first_in_series: bool -) -> None: - # TODO: automate + else: + message = ( + "Create a patch dropdown section at the top of " + f"{self.whats_news.release.name}.\n" + f"See {self.whats_news.template} for how this should be written." + ) + self.wait_for_done(message) - _break_print("Release branch management ...") + if self.first_in_series: + # TODO: automate + message = ( + "Remove the What's New template file.\n" + f"git rm {self.whats_news.template.absolute()};" + ) + self.wait_for_done(message) - if first_in_series: message = ( - "Visit https://github.com/SciTools/iris and create the" - f"``{release_strings.branch}`` release branch from ``main``." + "Commit and push all the What's New changes.\n" + f"git add {self.whats_news.release.absolute()};\n" + f"git add {self.whats_news.index.absolute()};\n" + f'git commit -m "Whats new updates for {self.git_tag} .";\n' + f"git push -u origin {working_branch};" ) - _wait_for_done(message) + self.wait_for_done(message) - else: + self._create_pr( + base_org="SciTools", + base_repo="iris", + base_branch=self.strings.branch, + head_branch=working_branch, + ) message = ( - "Cherry-pick any specific commits that are needed from ``main`` " - f"onto {release_strings.branch} , to get the CI passing.\n" - "E.g. a new dependency pin may have been introduced since " - f"{release_strings.branch} was last updated from ``main``.\n" - "DO NOT squash-merge - want to preserve the original commit SHA's." + "Work with the development team to get the PR merged.\n" + "Make sure the documentation is previewed during this process.\n" + "Make sure you are NOT targeting the `main` branch." 
) - _wait_for_done(message) + self.wait_for_done(message) + def cut_release(self): + self.print("The release ...") -def finalise_whats_new( - release_type: ReleaseTypes, - release_strings: ReleaseStrings, - is_release_candidate: bool, - first_in_series: bool, -) -> WhatsNewRsts: - _break_print("What's New finalisation ...") - - working_branch = release_strings.branch + ".updates" - _delete_local_branch(working_branch) - message = ( - f"Checkout a local branch from the official {release_strings.branch} branch.\n" - "git fetch upstream;\n" - f"git checkout upstream/{release_strings.branch} -b " - f"{working_branch};" - ) - _wait_for_done(message) - - rsts = WhatsNewRsts(release_strings) + message = ( + "Visit https://github.com/SciTools/iris/releases/new to open " + "a blank new-release web page." + ) + self.wait_for_done(message) - # TODO: automate - if first_in_series: message = ( - "'Cut' the What's New for the release.\n" - f"git mv {rsts.latest.absolute()} {rsts.release.absolute()};" + f"Select {self.strings.branch} as the Target.\n" + f"Input {self.git_tag} as the new tag to create, and also as " + "the Release title.\n" + "Make sure you are NOT targeting the `main` branch." ) - _wait_for_done(message) + self.wait_for_done(message) message = ( - f"In {rsts.index.absolute()}:\n" - f"Replace references to {rsts.latest.name} with {rsts.release.name}" + "Populate the main text box.\n" + "- Usual approach: copy from the last similar release, and " + "THOROUGHLY check for all references to the old release - change " + "these.\n" + "- Alternatively: craft a new release description from scratch. " + "Be sure to mention the What's New entry, conda-forge and PyPI; " + "note that you will need to return later to make these into " + "links.\n" ) - _wait_for_done(message) + self.wait_for_done(message) - _break_print(f"What's New file path = {rsts.release}") + if self.is_release_candidate: + message = ( + "This is a release candidate - include the following " + "instructions for installing with conda or pip:\n" + f"conda install -c conda-forge/label/rc_iris iris={self.strings.release}\n" + f"pip install scitools-iris=={self.strings.release}" + ) + self.wait_for_done(message) - if not release_type == ReleaseTypes.PATCH: - whatsnew_title = f"{release_strings.series} ({datetime.today().strftime('%d %b %Y')})" - if is_release_candidate: - whatsnew_title += " [release candidate]" - # TODO: automate - message = f"In {rsts.release.name}: set the page title to:\n{whatsnew_title}\n" - if not is_release_candidate: - message += ( - "\nBe sure to remove any existing mentions of release " - "candidate from the title.\n" + message = ( + "This is a release candidate - tick the box to set this as a " + "pre-release." ) - _wait_for_done(message) + self.wait_for_done(message) - message = ( - f"In {rsts.release.name}: ensure the page title underline is " - "the exact same length as the page title text." - ) - _wait_for_done(message) + else: + message = "Tick the box to set this as the latest release." + self.wait_for_done(message) - dropdown_title = f"\n{release_strings.series} Release Highlights\n" - message = ( - f"In {rsts.release.name}: set the sphinx-design dropdown title to:{dropdown_title}" - ) - _wait_for_done(message) + message = "Click: Publish release !" + self.wait_for_done(message) message = ( - f"Review {rsts.release.name} to ensure it is a good reflection of " - f"what is new in {release_strings.series}." + "The CI will now run against this new tag, including automatically " + "publishing to PyPI." 
) - _wait_for_done(message) + self.print(message) + url = "https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml" message = ( - "Work with the development team to populate the Release " - f"Highlights dropdown section at the top of {rsts.release.name}." + f"Visit {url} to monitor the building, testing and publishing of " + "the Iris sdist and binary wheel to PyPI." ) - _wait_for_done(message) + self.wait_for_done(message) - else: - message = ( - "Create a patch dropdown section at the top of " - f"{rsts.release.name}.\n" - f"See {rsts.template} for how this should be written." - ) - _wait_for_done(message) + def check_rtd(self): + self.print("Read the Docs checks ...") - if first_in_series: - # TODO: automate message = ( - "Remove the What's New template file.\n" - f"git rm {rsts.template.absolute()};" + "Visit https://readthedocs.org/projects/scitools-iris/versions/ " + "and make sure you are logged in." ) - _wait_for_done(message) + self.wait_for_done(message) - message = ( - "Commit and push all the What's New changes.\n" - f"git commit -am \"What's new updates for {release_strings.tag} .\";\n" - f"git push -u origin {working_branch};" - ) - _wait_for_done(message) + message = f"Set {self.git_tag} to Active, un-Hidden." + self.wait_for_done(message) - message = ( - f"Follow the Pull Request process to get {working_branch} " - f"merged into upstream/{release_strings.branch} .\n" - "Make sure the documentation is previewed during this process." - ) - _wait_for_done(message) - - return rsts - - -def cut_release( - release_strings: ReleaseStrings, is_release_candidate: bool -) -> None: - _break_print("The release ...") + message = f"Set {self.strings.branch} to Active, Hidden." + self.wait_for_done(message) - message = ( - "Visit https://github.com/SciTools/iris/releases/new to open " - "a blank new-release web page." - ) - _wait_for_done(message) - - message = ( - f"Select {release_strings.branch} as the Target.\n" - f"Input {release_strings.tag} as the new tag to create, and also as " - "the Release title." - ) - _wait_for_done(message) - - message = ( - "Craft an appropriate release description in the main text box.\n" - "Be sure to mention the What's New entry, conda-forge and PyPI - you " - "will need to return later to make these into links.\n" - "Be careful to change the appropriate words if copying from a " - "previous release description." - ) - _wait_for_done(message) - - if is_release_candidate: message = ( - "This is a release candidate - include the following instructions " - "for installing with conda or pip:\n" - f"conda install -c conda-forge/label/rc_iris iris={release_strings.release}\n" - f"pip install scitools-iris=={release_strings.release}" + "Keep only the latest 2 branch doc builds active - " + f"'{self.strings.branch}' and the previous one - deactivate older " + "ones." ) - _wait_for_done(message) + self.wait_for_done(message) message = ( - "This is a release candidate - tick the box to set this as a " - "pre-release." + f"Visit https://scitools-iris.readthedocs.io/en/{self.git_tag} " + "to confirm:\n\n" + "- The docs have rendered.\n" + "- The version badge in the top left reads:\n" + f" 'version (archived) | {self.git_tag}'\n" + " (this demonstrates that setuptools_scm has worked correctly).\n" + "- The What's New looks correct.\n" + f"- {self.git_tag} is available in RTD's version switcher.\n\n" + "NOTE: the docs can take several minutes to finish building." 
) - _wait_for_done(message) - - else: - message = "Tick the box to set this as the latest release." - _wait_for_done(message) - - message = "Click: Publish release !" - _wait_for_done(message) - - message = ( - "The CI will now run against this new tag, including automatically " - "publishing to PyPI." - ) - _break_print(message) - - url = "https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml" - message = ( - f"Visit {url} to monitor the building, testing and publishing of " - "the Iris sdist and binary wheel to PyPI." - ) - _wait_for_done(message) - - -def check_rtd( - release_strings: ReleaseStrings, is_release_candidate: bool -) -> None: - _break_print("Read the Docs checks ...") - - message = ( - "Visit https://readthedocs.org/projects/scitools-iris/versions/ and " - "make sure you are logged in." - ) - _wait_for_done(message) - - message = f"Set {release_strings.tag} to Active, un-Hidden." - _wait_for_done(message) - - message = f"Set {release_strings.branch} to Active, Hidden." - _wait_for_done(message) - - message = ( - "Keep the latest 2 branch doc builds active - those formatted 0.0.x - " - "deactivate older ones." - ) - _wait_for_done(message) - - message = ( - f"Visit https://scitools-iris.readthedocs.io/en/{release_strings.tag} " - "to confirm:\n\n" - "- The docs have rendered.\n" - f"- The version badge in the top left reads: {release_strings.tag} .\n" - " (this demonstrates that setuptools_scm has worked correctly).\n" - "- The What's New looks correct.\n" - f"- {release_strings.tag} is available in RTD's version switcher.\n\n" - "NOTE: the docs can take several minutes to finish building." - ) - if not is_release_candidate: - message += ( - "- Selecting 'stable' in the version switcher also brings up the " - f"{release_strings.tag} render." - ) - _wait_for_done(message) - - message = ( - f"Visit https://scitools-iris.readthedocs.io/en/{release_strings.branch} " - "to confirm:\n\n" - "- The docs have rendered\n" - f"- The version badge in the top left includes: {release_strings.branch} .\n" - f"- {release_strings.branch} is NOT available in RTD's version switcher.\n\n" - "NOTE: the docs can take several minutes to finish building." - ) - _wait_for_done(message) - - -def check_pypi( - release_strings: ReleaseStrings, is_release_candidate: bool -) -> str: - _break_print("PyPI checks ...") - _break_print("If anything goes wrong, manual steps are in the documentation.") - - message = ( - "Confirm that the following URL is correctly populated:\n" - f"https://pypi.org/project/scitools-iris/{release_strings.release}/" - ) - _wait_for_done(message) - - message = ( - f"Confirm that {release_strings.release} is at the top of this page:\n" - "https://pypi.org/project/scitools-iris/#history" - ) - _wait_for_done(message) + if not self.is_release_candidate: + message += ( + "- Selecting 'stable' in the version switcher also brings up " + f"the {self.git_tag} render." 
+ ) + self.wait_for_done(message) - if is_release_candidate: - message = ( - f"Confirm that {release_strings.release} is marked as a " - f"pre-release on this page:\n" - "https://pypi.org/project/scitools-iris/#history" - ) - else: message = ( - f"Confirm that {release_strings.release} is the tag shown on the " - "scitools-iris PyPI homepage:\n" - "https://pypi.org/project/scitools-iris/" + f"Visit https://scitools-iris.readthedocs.io/en/{self.strings.branch} " + "to confirm:\n\n" + "- The docs have rendered\n" + f"- The version badge in the top left includes: {self.strings.branch} .\n" + f"- {self.strings.branch} is NOT available in RTD's version switcher.\n\n" + "NOTE: the docs can take several minutes to finish building." ) - _wait_for_done(message) - - message = ( - f"Visit the below and click `view hashes` for the Source Distribution" - f"(`.tar.gz`):\n" - f"https://pypi.org/project/scitools-iris/{release_strings.release}#files\n" - ) - sha256 = _get_input(message, "Input the SHA256 hash") + self.wait_for_done(message) - message = ( - "Confirm that pip install works as expected:\n" - f"pip install scitools-iris=={release_strings.release};" - ) - _wait_for_done(message) - - return sha256 - - -def update_conda_forge( - release_strings: ReleaseStrings, is_release_candidate: bool, sha256: str -) -> None: - _break_print("conda-forge updates ...") + def check_pypi(self): + self.print("PyPI checks ...") + self.print("If anything goes wrong, manual steps are in the documentation.") - if not is_release_candidate: message = ( - "NOTE: after several hours conda-forge automation will create a " - "Pull Request against conda-forge/iris-feedstock (via the " - "regro-cf-autotick-bot). Quicker to sort it now, manually ..." + "Confirm that the following URL is correctly populated:\n" + f"https://pypi.org/project/scitools-iris/{self.strings.release}/" ) - _break_print(message) - - message = ( - "Make sure you have a GitHub fork of:\n" - "https://github.com/conda-forge/iris-feedstock" - ) - _wait_for_done(message) - - message = ( - "Make sure you have a local clone of your iris-feedstock fork.\n" - "`cd` into your clone." - ) - _wait_for_done(message) + self.wait_for_done(message) - if is_release_candidate: message = ( - "Visit the conda-forge feedstock branches page:\n" - "https://github.com/conda-forge/iris-feedstock/branches" + f"Confirm that {self.strings.release} is at the top of this page:\n" + "https://pypi.org/project/scitools-iris/#history" ) - _wait_for_done(message) + self.wait_for_done(message) + + if self.is_release_candidate: + message = ( + f"Confirm that {self.strings.release} is marked as a " + f"pre-release on this page:\n" + "https://pypi.org/project/scitools-iris/#history" + ) + else: + message = ( + f"Confirm that {self.strings.release} is the tag shown on the " + "scitools-iris PyPI homepage:\n" + "https://pypi.org/project/scitools-iris/" + ) + self.wait_for_done(message) + + def validate(sha256_string: str) -> str: + valid = True + try: + _ = int(sha256_string, 16) + except ValueError: + valid = False + valid = valid and len(sha256_string) == 64 + + if not valid: + self.report_problem("Invalid SHA256 hash. 
Please try again ...") + else: + return sha256_string message = ( - "Find the release candidate branch - " - "`rc`/`release-candidate`/similar.\n" + f"Visit the below and click `view hashes` for the Source Distribution" + f"(`.tar.gz`):\n" + f"https://pypi.org/project/scitools-iris/{self.strings.release}#files\n" ) - rc_branch = _get_input( - message, - "Input the name of the release candidate branch" + self.set_value_from_input( + key="sha256", + message=message, + expected_inputs="Input the SHA256 hash", + post_process=validate, ) message = ( - f"Is the latest commit on {rc_branch} over 1 month ago?" + "Confirm that pip install works as expected:\n" + "conda create -y -n tmp_iris pip cf-units;\n" + "conda activate tmp_iris;\n" + f"pip install scitools-iris=={self.strings.release};\n" + 'python -c "import iris; print(iris.__version__)";\n' + "conda deactivate;\n" + "conda remove -n tmp_iris --all;\n" ) - archive_rc = None - while archive_rc is None: - age_check = _get_input(message, "y / n") - if age_check.casefold() == "y".casefold(): - archive_rc = True - elif age_check.casefold() == "n".casefold(): - archive_rc = False - else: - _report_problem("Invalid entry. Please try again ...") + self.wait_for_done(message) - if archive_rc: - # We chose this odd handling of release candidate branches because - # a persistent branch will gradually diverge as `main` receives - # automatic and manual maintenance (where recreating these on - # another branch is often beyond Iris dev expertise). Advised - # practice from conda-forge is also liable to evolve over time. - # Since there is no benefit to a continuous Git history on the - # release candidate branch, the simplest way to keep it aligned - # with best practice is to regularly create a fresh branch from - # `main`. + def update_conda_forge(self): + self.print("conda-forge checks ...") - date_string = datetime.today().strftime("%Y%m%d") + if not self.is_release_candidate: message = ( - f"Archive the {rc_branch} branch by appending _{date_string} " - "to its name.\n" - f"e.g. rc_{date_string}\n\n" - f"({__file__} includes an explanation of this in the comments)." + "NOTE: after several hours conda-forge automation will " + "create a " + "Pull Request against conda-forge/iris-feedstock (via the " + "regro-cf-autotick-bot). Quicker to sort it now, manually ..." 
) - _wait_for_done(message) + self.print(message) + + message = ( + "Make sure you have a GitHub fork of:\n" + "https://github.com/conda-forge/iris-feedstock" + ) + self.wait_for_done(message) + if self.is_release_candidate: message = ( - "Follow the latest conda-forge guidance for creating a new " - "release candidate branch from the `main` branch:\n" - "https://conda-forge.org/docs/maintainer/knowledge_base.html#pre-release-builds\n\n" - "Config file(s) should point to the `rc_iris` label.\n" + "Visit the conda-forge feedstock branches page:\n" + "https://github.com/conda-forge/iris-feedstock/branches" ) - rc_branch = _get_input(message, "Input the name of your new branch") + self.wait_for_done(message) - upstream_branch = rc_branch - else: - upstream_branch = "main" + message = ( + "Find the release candidate branch - typical names:\n" + "`rc` / `release-candidate` / similar .\n" + ) + rc_branch = self.get_input( + message, + "Input the name of the release candidate branch" + ) - # TODO: automate - message = ( - "Checkout a new branch for the conda-forge changes for this release:\n" - "git fetch upstream;\n" - f"git checkout upstream/{upstream_branch} -b {release_strings.tag};\n" - ) - _wait_for_done(message) - - message = ( - "Update ./recipe/meta.yaml:\n\n" - f"- The version at the very top of the file: {release_strings.release}\n" - f"- The sha256 hash: {sha256}\n" - "- Requirements: align the packages and pins with those in the Iris repo\n" - "- Maintainers: update with any changes to the dev team\n" - "- MAKE SURE everything else is correct - plenty of other things " - "might need one-off changes.\n" - ) - _wait_for_done(message) - - # TODO: automate - message = ( - "No other file normally needs changing in iris-feedstock, so push up " - "the changes to prepare for a Pull Request:\n" - f'git commit -am "Recipe updates for {release_strings.tag} .";\n' - f"git push -u origin {release_strings.tag};" - ) - _wait_for_done(message) + message = ( + f"Is the latest commit on {rc_branch} over 1 month ago?" + ) + archive_rc = None + while archive_rc is None: + valid_entries = ["y", "n"] + age_check = self.get_input(message, " / ".join(valid_entries)) + match = [age_check.casefold() == e.casefold() for e in valid_entries] + if not any(match): + self.report_problem("Invalid entry. Please try again ...") + else: + archive_rc = match[0] + + if archive_rc: + # We chose this odd handling of release candidate branches because + # a persistent branch will gradually diverge as `main` receives + # automatic and manual maintenance (where recreating these on + # another branch is often beyond Iris dev expertise). Advised + # practice from conda-forge is also liable to evolve over time. + # Since there is no benefit to a continuous Git history on the + # release candidate branch, the simplest way to keep it aligned + # with best practice is to regularly create a fresh branch from + # `main`. + + date_string = datetime.today().strftime("%Y%m%d") + message = ( + f"Archive the {rc_branch} branch by appending _" + f"{date_string} " + "to its name.\n" + f"e.g. rc_{date_string}\n\n" + f"({__file__} includes an explanation of this in the " + f"comments)." + ) + self.wait_for_done(message) - message = ( - f"Follow the Pull Request process to get {release_strings.tag} branch " - f"merged into upstream/{upstream_branch} .\n" - "Specific conda-forge guidance will be automatically given once the " - "PR is created." 
- ) - _wait_for_done(message) + message = ( + "Follow the latest conda-forge guidance for creating a new " + "release candidate branch from the `main` branch:\n" + "https://conda-forge.org/docs/maintainer/knowledge_base.html#pre-release-builds\n\n" + "If you need to change any feedstock files: a pull " + "request is coming in the the next steps so you can make " + "those changes at that point.\n\n" + "DEVIATION FROM GUIDANCE: config file(s) should point to " + "the `rc_iris` label (this is not the name that " + "conda-forge suggest).\n" + ) + rc_branch = self.get_input(message, "Input the name of your new branch") - message = ( - f"Confirm that {release_strings.release} appears in this list:\n" - "https://anaconda.org/conda-forge/iris/files" - ) - _wait_for_done(message) + upstream_branch = rc_branch + else: + upstream_branch = "main" - if not is_release_candidate: + # TODO: automate message = ( - f"Confirm that {release_strings.release} is displayed on this " - "page as the latest available:\n" - "https://anaconda.org/conda-forge/iris" + "Checkout a new branch for the conda-forge changes for this " + "release:\n" + "git fetch upstream;\n" + f"git checkout upstream/{upstream_branch} -b " + f"{self.git_tag};\n" ) - _wait_for_done(message) - - if is_release_candidate: - channel_command = " -c conda-forge/label/rc_iris " - else: - channel_command = " " - message = ( - "Confirm that conda (or mamba) install works as expected:\n" - f"conda create -n tmp_iris{channel_command}iris={release_strings.release};\n" - f"conda remove -n tmp_iris --all;" - ) - _wait_for_done(message) - - -def update_links(release_strings: ReleaseStrings) -> None: - _break_print("Link updates ...") - - message = ( - "Revisit the GitHub release:\n" - f"https://github.com/SciTools/iris/releases/tag/{release_strings.tag}\n" - "You have confirmed that Read the Docs, PyPI and conda-forge have all " - "updated correctly. Include the following links in the release " - "notes:\n\n" - f"https://scitools-iris.readthedocs.io/en/{release_strings.tag}/\n" - f"https://pypi.org/project/scitools-iris/{release_strings.release}/\n" - f"https://anaconda.org/conda-forge/iris?version={release_strings.release}\n" - ) - _wait_for_done(message) - - message = ( - "Update the release page in GitHub discussions, with the above links " - "and anything else appropriate.\n" - "https://github.com/SciTools/iris/discussions" - ) - _wait_for_done(message) + self.wait_for_done(message) + message = ( + "Update ./recipe/meta.yaml:\n\n" + f"- The version at the very top of the file: " + f"{self.strings.release}\n" + f"- The sha256 hash: {self.sha256}\n" + "- Requirements: align the packages and pins with those in the " + "Iris repo\n" + "- Maintainers: update with any changes to the dev team\n" + "- Skim read the entire file to see if anything else is out of" + "date, e.g. is the licence info still correct? Ask the lead " + "Iris developers if unsure.\n" + ) + self.wait_for_done(message) -def twitter_announce( - release_strings: ReleaseStrings, first_in_series: bool -) -> None: - message = ( - "Announce the release via https://twitter.com/scitools_iris, and any " - "other appropriate message boards (e.g. Yammer).\n" - "Any content used for the announcement should be stored in the " - "SciTools/twitter-scitools-iris GitHub repo.\n" - ) - if not first_in_series: - message += ( - f"Consider replying within an existing {release_strings.series} " - "announcement thread, if appropriate." 
+ # TODO: automate + message = ( + "No other file normally needs changing in iris-feedstock, " + "so push up " + "the changes to prepare for a Pull Request:\n" + f"git add recipe/meta.yaml;\n" + f'git commit -m "Recipe updates for {self.git_tag} .";\n' + f"git push -u origin {self.git_tag};" + ) + self.wait_for_done(message) + + self._create_pr( + base_org="conda-forge", + base_repo="iris-feedstock", + base_branch=upstream_branch, + head_branch=self.git_tag, + ) + + if self.is_release_candidate: + readme_url = f"https://github.com/{self.github_user}/iris-feedstock/blob/{self.git_tag}/README.md" + rc_evidence = ( + "\n\nConfirm that conda-forge knows your changes are for the " + "release candidate channel by checking the below README file. " + "This should make multiple references to the `rc_iris` label:\n" + f"{readme_url}" + ) + else: + rc_evidence = "" + message = ( + "Follow the automatic conda-forge guidance for further populating " + f"your Pull Request.{rc_evidence}" ) - _wait_for_done(message) + self.wait_for_done(message) + message = "Work with your fellow feedstock maintainers to get the PR merged." + self.wait_for_done(message) -def update_citation( - release_strings: ReleaseStrings, is_release_candidate: bool -) -> None: - if not is_release_candidate: - src_dir = Path(__file__).parents[1] / "docs" / "src" - citation_rst = src_dir / "userguide" / "citation.rst" - assert citation_rst.is_file() message = ( - f"Follow the Pull Request process to update {citation_rst.name} " - "with the correct dates, DOI and version string for " - f"{release_strings.tag}.\n" - f"{citation_rst.absolute()}\n\n" - f"The PR should target {release_strings.branch} (prior to merge-back)." + "After the PR is merged, wait for the CI to complete, after which " + "the new version of Iris will be on conda-forge's servers.\n" + "https://dev.azure.com/conda-forge/feedstock-builds/_build?definitionId=464" ) - _wait_for_done(message) - + self.wait_for_done(message) -def merge_back( - release_strings: ReleaseStrings, first_in_series: bool, rsts: WhatsNewRsts -) -> None: - _break_print("Branch merge-back ...") + message = ( + f"Confirm that {self.strings.release} appears in this list:\n" + "https://anaconda.org/conda-forge/iris/files" + ) + self.wait_for_done(message) - merge_commit = ( - "BE SURE TO MERGE VIA A MERGE-COMMIT (not a squash-commit), to " - "preserve the commit SHA's." - ) + if not self.is_release_candidate: + message = ( + f"Confirm that {self.strings.release} is displayed on this " + "page as the latest available:\n" + "https://anaconda.org/conda-forge/iris" + ) + self.wait_for_done(message) - if first_in_series: - # TODO: automate + if self.is_release_candidate: + channel_command = " -c conda-forge/label/rc_iris " + else: + channel_command = " -c conda-forge " - working_branch = release_strings.branch + ".mergeback" - _delete_local_branch(working_branch) message = ( - "Checkout a local branch from the official ``main`` branch.\n" - "git fetch upstream;\n" - f"git checkout upstream/main -b {working_branch};" + "The new release will now undergo testing and validation in the " + "cf-staging channel. Once this is complete, the release will be " + "available in the standard conda-forge channel. 
This can " + "sometimes take minutes, or up to an hour.\n" + "Confirm that the new release is available for use from " + "conda-forge by running the following command:\n" + f"conda search{channel_command}iris=={self.strings.release};" ) - _wait_for_done(message) + self.wait_for_done(message) message = ( - f"Merge in the commits from {release_strings.branch}.\n" - f"{merge_commit}\n" - f"git merge upstream/{release_strings.branch} --no-ff " - '-m "Merging release branch into main";' + "Confirm that conda (or mamba) install works as expected:\n" + f"conda create -n tmp_iris{channel_command}iris=" + f"{self.strings.release};\n" + "conda activate tmp_iris;\n" + 'python -c "import iris; print(iris.__version__)";\n' + "conda deactivate;\n" + f"conda remove -n tmp_iris --all;" ) - _wait_for_done(message) + self.wait_for_done(message) - message = ( - "Recreate the following files, which are present in ``main``, but " - f"are currently deleted from {working_branch}:\n" - f"{rsts.latest.absolute()}\n" - f"{rsts.template.absolute()}\n" - "THEN:\n" - f"git add {rsts.latest.absolute()};\n" - f"git add {rsts.template.absolute()};\n" - ) - _wait_for_done(message) + def update_links(self): + self.print("Link updates ...") message = ( - f"In {rsts.index.absolute()}:\n" - f"Add {rsts.latest.name} to the top of the list of .rst files, " - f"and set the top include:: to be {rsts.latest.name} ." + "Revisit the GitHub release:\n" + f"https://github.com/SciTools/iris/releases/tag/{self.git_tag}\n" + "You have confirmed that Read the Docs, PyPI and conda-forge have all " + "updated correctly. Include the following links in the release " + "notes:\n\n" + f"https://scitools-iris.readthedocs.io/en/{self.git_tag}/\n" + f"https://pypi.org/project/scitools-iris/{self.strings.release}/\n" + f"https://anaconda.org/conda-forge/iris?version={self.strings.release}\n" ) - _wait_for_done(message) + self.wait_for_done(message) message = ( - "Commit and push all the What's New changes.\n" - "git commit -am \"Restore latest What's New files.\";\n" - f"git push -u origin {working_branch};" + "What is the URL for the GitHub discussions page of this " + "release?\n" + "https://github.com/SciTools/iris/discussions\n" ) - _wait_for_done(message) + discussion_url = self.get_input(message, "Input the URL") message = ( - "Follow the Pull Request process to get " - f"{working_branch} merged into upstream/main .\n" - "Make sure the documentation is previewed during this process.\n" - f"{merge_commit}" + f"Update {discussion_url}, with the above " + "links and anything else appropriate.\n" + "The simplest way is to copy appropriate content from a previous " + "release, then edit it to match the current release." ) - _wait_for_done(message) + self.wait_for_done(message) - else: message = ( - f"Propose a merge-back from {release_strings.branch} into ``main`` by " - f"visiting this URL and clicking `Create pull request`:\n" - f"https://github.com/SciTools/iris/compare/main...{release_strings.branch}\n" - f"{merge_commit}" + f"Comment on {discussion_url} to notify anyone watching that " + f"{self.git_tag} has been released." ) - _wait_for_done(message) + self.wait_for_done(message) + + def twitter_announce(self): message = ( - f"Once the pull request is merged ensure that the {release_strings.branch} " - "release branch is restored.\n" - "GitHub automation rules may have automatically deleted the release branch." + "Announce the release via https://twitter.com/scitools_iris, " + "and any " + "other appropriate message boards (e.g. 
Viva Engage).\n" + "Any content used for the announcement should be stored in the " + "SciTools/twitter-scitools-iris GitHub repo.\n" ) - _wait_for_done(message) - - -def main(): - _mark_section(1) - release_type = get_release_type() - - _mark_section(2) - release_strings = get_release_tag() - - _mark_section(3) - is_release_candidate = check_release_candidate( - release_type, - release_strings, - ) - - _mark_section(4) - is_first_in_series = check_first_in_series( - release_type, - release_strings, - is_release_candidate, - ) - - _mark_section(5) - update_standard_names( - is_first_in_series, - ) + if not self.first_in_series: + message += ( + f"Consider replying within an existing " + f"{self.strings.series} " + "announcement thread, if appropriate." + ) + self.wait_for_done(message) - _mark_section(6) - check_deprecations( - release_type, - ) + def merge_back(self): + self.print("Branch merge-back ...") - _mark_section(7) - create_release_branch( - release_strings, - is_first_in_series, - ) + merge_commit = ( + "BE SURE TO MERGE VIA A MERGE-COMMIT (not a squash-commit), to " + "preserve the commit SHA's." + ) - _mark_section(8) - whats_new_rsts = finalise_whats_new( - release_type, - release_strings, - is_release_candidate, - is_first_in_series, - ) + if self.first_in_series: + # TODO: automate - _mark_section(9) - cut_release( - release_strings, - is_release_candidate, - ) + working_branch = self.strings.branch + ".mergeback" + self._delete_local_branch(working_branch) + message = ( + "Checkout a local branch from the official ``main`` branch.\n" + "git fetch upstream;\n" + f"git checkout upstream/main -b {working_branch};" + ) + self.wait_for_done(message) - _mark_section(10) - check_rtd( - release_strings, - is_release_candidate, - ) + message = ( + f"Merge in the commits from {self.strings.branch}.\n" + f"{merge_commit}\n" + f"git merge upstream/{self.strings.branch} --no-ff " + '-m "Merging release branch into main";' + ) + self.wait_for_done(message) - _mark_section(11) - sha256 = check_pypi( - release_strings, - is_release_candidate, - ) + message = ( + "Recreate the What's New template from ``main``:\n" + f"git checkout upstream/main {self.whats_news.template.absolute()};\n" + ) + self.wait_for_done(message) - _mark_section(12) - update_conda_forge( - release_strings, - is_release_candidate, - sha256, - ) + message = ( + "Recreate the What's New latest from the template:\n" + f"cp {self.whats_news.template.absolute()} " + f"{self.whats_news.latest.absolute()};\n" + f"git add {self.whats_news.latest.absolute()};\n" + ) + self.wait_for_done(message) - _mark_section(13) - update_links( - release_strings, - ) + message = ( + f"Follow any guidance in {self.whats_news.latest.name} to " + "complete the recreation-from-template.\n" + "E.g. removing the bugfix section." + ) + self.wait_for_done(message) - _mark_section(14) - twitter_announce( - release_strings, - is_first_in_series, - ) + message = ( + f"In {self.whats_news.index.absolute()}:\n" + f"Add {self.whats_news.latest.name} to the top of the list of .rst " + f"files, " + f"and set the top include:: to be {self.whats_news.latest.name} ." 
+ ) + self.wait_for_done(message) - _mark_section(15) - update_citation( - release_strings, - is_release_candidate, - ) + message = ( + "Commit and push all the What's New changes.\n" + f"git add {self.whats_news.index.absolute()};\n" + 'git commit -m "Restore latest Whats New files.";\n' + f"git push -u origin {working_branch};" + ) + self.wait_for_done(message) - _mark_section(16) - merge_back( - release_strings, - is_first_in_series, - whats_new_rsts, - ) + self._create_pr( + base_org="SciTools", + base_repo="iris", + base_branch="main", + head_branch=working_branch, + ) + message = ( + "Work with the development team to get the PR merged.\n" + "Make sure the documentation is previewed during this process.\n" + f"{merge_commit}" + ) + self.wait_for_done(message) - _break_print("RELEASE COMPLETE. Congratulations! 🎉") + else: + message = ( + f"Propose a merge-back from {self.strings.branch} into " + f"``main`` by " + f"visiting this URL and clicking `Create pull request`:\n" + f"https://github.com/SciTools/iris/compare/main..." + f"{self.strings.branch}\n" + f"{merge_commit}" + ) + self.wait_for_done(message) + message = ( + f"Once the pull request is merged ensure that the " + f"{self.strings.branch} " + "release branch is restored.\n" + "GitHub automation rules may have automatically deleted the " + "release branch." + ) + self.wait_for_done(message) if __name__ == "__main__": - main() + IrisRelease.main() From 7104f26ae519baaa9f75a4fc3473e211c1b2b8e3 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:23:04 +0000 Subject: [PATCH 15/74] Add a pytest-unittest translator to the conversion guide (#6213) * added conversion translation section, and fixed mocker reference * mocker is a fixture * which can, not which and * clarified mocker is the fixture, not .patch --- .../contributing_pytest_conversions.rst | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/docs/src/developers_guide/contributing_pytest_conversions.rst b/docs/src/developers_guide/contributing_pytest_conversions.rst index c6bb35c2cd..dd556154e7 100644 --- a/docs/src/developers_guide/contributing_pytest_conversions.rst +++ b/docs/src/developers_guide/contributing_pytest_conversions.rst @@ -41,7 +41,7 @@ Conversion Checklist #. Check for references to ``@tests``. These should be changed to ``@_shared_utils``. #. Check for references to ``with mock.patch("...")``. These should be replaced with - ``mocker.patch("...")``. Note, ``mocker.patch("...")`` is NOT a context manager. + ``mocker.patch("...")``. ``mocker`` is a fixture, and can be passed into functions. #. Check for ``np.testing.assert...``. This can usually be swapped for ``_shared_utils.assert...``. #. Check for references to ``super()``. Most test classes used to inherit from @@ -54,3 +54,23 @@ Conversion Checklist #. Check the file against https://github.com/astral-sh/ruff , using ``pip install ruff`` -> ``ruff check --select PT ``. +Common Translations +------------------- + +.. 
list-table:: + :widths: 50 50 + :header-rows: 1 + + * - ``unittest`` method + - ``pytest`` equivalent + * - ``assertTrue(x)`` + - ``assert x`` + * - ``assertFalse(x)`` + - ``assert not x`` + * - ``assertRegex(x, y)`` + - ``assert re.match(y, x)`` + * - ``assertRaisesRegex(cls, msg_re)`` + - ``with pytest.raises(cls, match=msg_re):`` + * - ``mock.patch(...)`` + - ``mocker.patch(...)`` + From 4b5cf254236d3b8fdf7d244690f9cc34bc9d8458 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 5 Nov 2024 14:43:03 +0000 Subject: [PATCH 16/74] [pre-commit.ci] pre-commit autoupdate (#6217) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.1 → v0.7.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.1...v0.7.2) - [github.com/aio-libs/sort-all: v1.2.0 → v1.3.0](https://github.com/aio-libs/sort-all/compare/v1.2.0...v1.3.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index acc88476e9..c0b04ab1a9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.7.1" + rev: "v0.7.2" hooks: - id: ruff types: [file, python] @@ -57,7 +57,7 @@ repos: types: [file, rst] - repo: https://github.com/aio-libs/sort-all - rev: v1.2.0 + rev: v1.3.0 hooks: - id: sort-all types: [file, python] From be9a04e54044d9a3d14cafc55b11d72f3149a6a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 12:06:09 +0000 Subject: [PATCH 17/74] Bump scitools/workflows from 2024.10.3 to 2024.11.2 (#6220) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.10.3 to 2024.11.2. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.10.3...2024.11.2) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 93e534a21c..6134ec2e04 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.10.3 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.2 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 898c5fe1b4..4275bcaab8 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.10.3 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.2 secrets: inherit From 3d2f12240f0d43f9b1ea05f96d57a1efe6983074 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:02:45 +0000 Subject: [PATCH 18/74] removed double reference of typehints and enhancements (#6219) --- docs/src/whatsnew/3.11.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/src/whatsnew/3.11.rst b/docs/src/whatsnew/3.11.rst index b6fcae64c1..59af73c9c6 100644 --- a/docs/src/whatsnew/3.11.rst +++ b/docs/src/whatsnew/3.11.rst @@ -22,10 +22,6 @@ This document explains the changes made to Iris for this release coordinates, e.g. a time-varying orography. This is controlled by the :meth:`~iris.LOAD_POLICY` object : see :class:`~iris.LoadPolicy`. - * We now have type hints in :class:`~iris.cube.Cube`, and - :meth:`iris.cube.CubeList.concatenate` is in places almost an order of - magnitude faster! - * `@bouweandela`_ added type hints for :class:`~iris.cube.Cube`. * Checkout the significant performance enhancements section for a couple of From 4e4138c1ff859352d478324a61edbf392cec2cab Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Mon, 11 Nov 2024 10:46:25 +0000 Subject: [PATCH 19/74] V3.11.x.updates (#6224) * removed mentions of RC from whatsnew 3.11 * corrected date in whatsnew 3.11 --- docs/src/whatsnew/3.11.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/3.11.rst b/docs/src/whatsnew/3.11.rst index 59af73c9c6..6aba0a5708 100644 --- a/docs/src/whatsnew/3.11.rst +++ b/docs/src/whatsnew/3.11.rst @@ -1,7 +1,7 @@ .. include:: ../common_links.inc -v3.11 (28 Oct 2024) [release candidate] -*************************************** +v3.11 (11 Nov 2024) +******************* This document explains the changes made to Iris for this release (:doc:`View all changes `.) From 0671e2bbbf609af8983180e03180e6d9d6d41558 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 14:48:23 +0000 Subject: [PATCH 20/74] Bump scitools/workflows from 2024.11.2 to 2024.11.3 (#6230) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.11.2 to 2024.11.3. 
- [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.11.2...2024.11.3) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 6134ec2e04..8e524c5b7d 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.2 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.3 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 4275bcaab8..9e604ebeaa 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.2 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.3 secrets: inherit From 8643b92bd934d15f9e0872f5e76f55976c5d49db Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 11:27:18 +0000 Subject: [PATCH 21/74] [pre-commit.ci] pre-commit autoupdate (#6226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.2 → v0.7.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.2...v0.7.3) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c0b04ab1a9..27d849a990 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.7.2" + rev: "v0.7.3" hooks: - id: ruff types: [file, python] From 00a9191a2944771d0dfe78d63c89094916a38ae4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 19:13:09 +0000 Subject: [PATCH 22/74] [pre-commit.ci] pre-commit autoupdate (#6233) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.3 → v0.7.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.3...v0.7.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 27d849a990..bbad00a706 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.7.3" + rev: "v0.7.4" hooks: - id: ruff types: [file, python] From 0a95b5a7682c602c0007bea91a9521d99c9e8bf9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 
08:37:52 +0000 Subject: [PATCH 23/74] Bump codecov/codecov-action from 4 to 5 (#6235) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 4b21e73384..72747ecb41 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -143,6 +143,6 @@ jobs: - name: "upload coverage report" if: ${{ matrix.coverage }} - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} \ No newline at end of file From fde0fe705535708b7b9f7a11fc55f9237499d244 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 16:20:09 +0000 Subject: [PATCH 24/74] Bump scitools/workflows from 2024.11.3 to 2024.11.5 (#6236) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.11.3 to 2024.11.5. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.11.3...2024.11.5) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 8e524c5b7d..298567df32 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.3 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.5 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 9e604ebeaa..134f253f00 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.3 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.5 secrets: inherit From 91cba1e001927b61c74f2d45e56e21801111c398 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 22:20:18 +0000 Subject: [PATCH 25/74] [pre-commit.ci] pre-commit autoupdate (#6238) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.4 → v0.8.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.4...v0.8.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bbad00a706..2fff405a5e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.7.4" + rev: "v0.8.0" hooks: - id: ruff types: [file, python] From af475b1f1ae7828bf801447637a77484381536ed Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Thu, 28 Nov 2024 18:03:52 +0000 Subject: [PATCH 26/74] Reduce default number of benchmark rounds. (#6234) --- benchmarks/bm_runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index afc08ff6fa..f7204db25f 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -27,7 +27,7 @@ GH_REPORT_DIR = ROOT_DIR.joinpath(".github", "workflows", "benchmark_reports") # Common ASV arguments for all run_types except `custom`. -ASV_HARNESS = "run {posargs} --attribute rounds=4 --interleave-rounds --show-stderr" +ASV_HARNESS = "run {posargs} --attribute rounds=3 --interleave-rounds --show-stderr" def echo(echo_string: str): From 80111b0fd13ecc66c3165e7f977e5a959f14a84c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Nov 2024 22:17:31 +0000 Subject: [PATCH 27/74] Bump scitools/workflows from 2024.11.5 to 2024.11.7 (#6241) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.11.5 to 2024.11.7. 
- [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.11.5...2024.11.7) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 298567df32..cb4ad45462 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.5 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.7 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 134f253f00..79f8848aa7 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.5 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.7 secrets: inherit From f959a95e4a0aa12ef377240b22e3a3bf4269eb89 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 3 Dec 2024 08:54:02 +0000 Subject: [PATCH 28/74] [pre-commit.ci] pre-commit autoupdate (#6242) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.0 → v0.8.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.0...v0.8.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2fff405a5e..86c27ca585 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.0" + rev: "v0.8.1" hooks: - id: ruff types: [file, python] From fc6ce5668ad846249c1bd77bb78edf96cbbe6671 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 10 Dec 2024 10:59:22 +0000 Subject: [PATCH 29/74] Fix broken link. (#6246) --- docs/src/further_topics/ugrid/other_meshes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/further_topics/ugrid/other_meshes.rst index 19f220be82..8b1c829cf1 100644 --- a/docs/src/further_topics/ugrid/other_meshes.rst +++ b/docs/src/further_topics/ugrid/other_meshes.rst @@ -360,5 +360,5 @@ dimensions into a single mesh dimension. Since Iris cubes don't support a "resh .. _WAVEWATCH III: https://github.com/NOAA-EMC/WW3 -.. _FESOM 1.4: https://fesom.de/models/fesom14/ +.. _FESOM 1.4: https://www.fesom.de/models/fesom14/ .. 
_NEMO: https://www.nemo-ocean.eu/ \ No newline at end of file From 1308f6e94e1b5927589e1120e6524a013350136f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 11:18:28 +0000 Subject: [PATCH 30/74] [pre-commit.ci] pre-commit autoupdate (#6245) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.1 → v0.8.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.1...v0.8.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 86c27ca585..9ec380ca43 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.1" + rev: "v0.8.2" hooks: - id: ruff types: [file, python] From ab3cfac01576074d91dc8d250f09d739b2953d02 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 11:47:26 +0000 Subject: [PATCH 31/74] Bump scitools/workflows from 2024.11.7 to 2024.12.0 (#6243) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.11.7 to 2024.12.0. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.11.7...2024.12.0) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index cb4ad45462..ad0fa50d57 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.11.7 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.0 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 79f8848aa7..046404c1fe 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.11.7 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.0 secrets: inherit From 697ba60911b44a56d49224ed58d16594913f6847 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:33:05 +0000 Subject: [PATCH 32/74] Remove freepik.com link due to linkcheck breakage (#6261) --- docs/src/index.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/src/index.rst b/docs/src/index.rst index a9bf76fc96..139e54cee0 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -125,8 +125,7 @@ For more information see :ref:`why_iris`. Voted Issues -Icons made by `FreePik `_ from -`Flaticon `_ +Icons made by FreePik from `Flaticon `_ .. 
_iris_support: From a5d7424f2ee8e17a8a41c3b585611697539bcda7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 13:37:23 +0000 Subject: [PATCH 33/74] [pre-commit.ci] pre-commit autoupdate (#6259) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.2 → v0.8.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.2...v0.8.3) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9ec380ca43..21eca9dff4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.2" + rev: "v0.8.3" hooks: - id: ruff types: [file, python] From 50ffdbdd52d36a091060c3ce3f96a4b91c9e8c27 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Tue, 17 Dec 2024 16:06:41 +0000 Subject: [PATCH 34/74] `date_microseconds` FUTURE flag (#6260) * PoC monkeypatch precision. * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Add FUTURE flag. * FutureWarning. * Corrected behaviour and added tests. * Corrected behaviour and added tests. * What's New entry. * Make sensitive to cf-units version. * Further test improvements. * Clearer FutureWarning text. * Use a cf-units subclass instead. * Rename _IrisUnit to Unit. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 13 ++- lib/iris/__init__.py | 23 ++++- lib/iris/common/mixin.py | 67 ++++++++++++- .../metadata/test_microsecond_future.py | 98 +++++++++++++++++++ 4 files changed, 196 insertions(+), 5 deletions(-) create mode 100644 lib/iris/tests/unit/common/metadata/test_microsecond_future.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7325630f22..52b9853ac2 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -30,7 +30,13 @@ This document explains the changes made to Iris for this release ✨ Features =========== -#. N/A +#. `@trexfeathers`_ added a new :class:`~iris.Future` flag - + ``date_microseconds`` - which sets whether Iris should use the new + microsecond-precision units (see :class:`cf_units.Unit`, microseconds + introduced in version 3.3) when the unit + is a time unit. The previous maximum precision was seconds. You should check + your code for new floating point problems if activating this (e.g. when + using the :class:`~iris.Constraint` API). (:pull:`6260`) 🐛 Bugs Fixed @@ -50,7 +56,10 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. N/A +#. Note that due to the new ``date_microseconds`` :class:`~iris.Future` flag, + the time coordinate categorisation speedup introduced in + :doc:`/whatsnew/3.11` will only be available when + ``iris.FUTURE.date_microseconds == True``. 
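A minimal sketch of opting in to the new flag (illustrative only, not part of the patch; it assumes cf-units >= 3.3 is installed so that microsecond precision is actually available, and the example coordinate is hypothetical):

import iris
from iris.coords import DimCoord

# Opt in to microsecond-precision dates (this also silences the FutureWarning
# raised on the legacy, seconds-only rounding path).
iris.FUTURE.date_microseconds = True

# A hypothetical time coordinate whose single point falls half a second past the epoch.
coord = DimCoord([0.5], "time", units="seconds since 1970-01-01 00:00:00")
dates = coord.units.num2date(coord.points)

# With the flag enabled (and cf-units >= 3.3) the microseconds survive: 500000 here.
# Under the legacy behaviour the result is instead rounded to the nearest whole second.
print(dates[0].microsecond)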
🔥 Deprecations diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index d4454efe89..bc2b84709d 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -143,7 +143,13 @@ def callback(cube, field, filename): class Future(threading.local): """Run-time configuration controller.""" - def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=False): + def __init__( + self, + datum_support=False, + pandas_ndim=False, + save_split_attrs=False, + date_microseconds=False, + ): """Container for run-time options controls. To adjust the values simply update the relevant attribute from @@ -169,6 +175,13 @@ def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=Fals different ways : "global" ones are saved as dataset attributes, where possible, while "local" ones are saved as data-variable attributes. See :func:`iris.fileformats.netcdf.saver.save`. + date_microseconds : bool, default=False + Newer versions of cftime and cf-units support microsecond precision + for dates, compared to the legacy behaviour that only works with + seconds. Enabling microsecond precision will alter core Iris + behaviour, such as when using :class:`~iris.Constraint`, and you + may need to defend against floating point precision issues where + you didn't need to before. """ # The flag 'example_future_flag' is provided as a reference for the @@ -181,6 +194,7 @@ def __init__(self, datum_support=False, pandas_ndim=False, save_split_attrs=Fals self.__dict__["datum_support"] = datum_support self.__dict__["pandas_ndim"] = pandas_ndim self.__dict__["save_split_attrs"] = save_split_attrs + self.__dict__["date_microseconds"] = date_microseconds # TODO: next major release: set IrisDeprecation to subclass # DeprecationWarning instead of UserWarning. @@ -189,7 +203,12 @@ def __repr__(self): # msg = ('Future(example_future_flag={})') # return msg.format(self.example_future_flag) msg = "Future(datum_support={}, pandas_ndim={}, save_split_attrs={})" - return msg.format(self.datum_support, self.pandas_ndim, self.save_split_attrs) + return msg.format( + self.datum_support, + self.pandas_ndim, + self.save_split_attrs, + self.date_microseconds, + ) # deprecated_options = {'example_future_flag': 'warning',} deprecated_options: dict[str, Literal["error", "warning"]] = {} diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 87d58944c7..e238ab9d36 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -7,8 +7,10 @@ from __future__ import annotations from collections.abc import Mapping +from datetime import timedelta from functools import wraps from typing import Any +import warnings import cf_units import numpy as np @@ -139,6 +141,68 @@ def update(self, other, **kwargs): dict.update(self, other, **kwargs) +class Unit(cf_units.Unit): + # TODO: remove this subclass once FUTURE.date_microseconds is removed. + + @classmethod + def from_unit(cls, unit: cf_units.Unit): + """Cast a :class:`cf_units.Unit` to an :class:`Unit`.""" + if isinstance(unit, Unit): + result = unit + elif isinstance(unit, cf_units.Unit): + result = cls.__new__(cls) + result.__dict__.update(unit.__dict__) + else: + message = f"Expected a cf_units.Unit, got {type(unit)}" + raise TypeError(message) + return result + + def num2date( + self, + time_value, + only_use_cftime_datetimes=True, + only_use_python_datetimes=False, + ): + # Used to patch the cf_units.Unit.num2date method to round to the + # nearest second, which was the legacy behaviour. 
This is under a FUTURE + # flag - users will need to adapt to microsecond precision eventually, + # which may involve floating point issues. + from iris import FUTURE + + def _round(date): + if date.microsecond == 0: + return date + elif date.microsecond < 500000: + return date - timedelta(microseconds=date.microsecond) + else: + return ( + date + + timedelta(seconds=1) + - timedelta(microseconds=date.microsecond) + ) + + result = super().num2date( + time_value, only_use_cftime_datetimes, only_use_python_datetimes + ) + if FUTURE.date_microseconds is False: + message = ( + "You are using legacy date precision for Iris units - max " + "precision is seconds. In future, Iris will use microsecond " + "precision - available since cf-units version 3.3 - which may " + "affect core behaviour. To opt-in to the " + "new behaviour, set `iris.FUTURE.date_microseconds = True`." + ) + warnings.warn(message, category=FutureWarning) + + if hasattr(result, "shape"): + vfunc = np.vectorize(_round) + result = vfunc(result) + else: + result = _round(result) + + return result + + class CFVariableMixin: _metadata_manager: Any @@ -207,7 +271,8 @@ def units(self) -> cf_units.Unit: @units.setter def units(self, unit: cf_units.Unit | str | None) -> None: - self._metadata_manager.units = cf_units.as_unit(unit) + unit = cf_units.as_unit(unit) + self._metadata_manager.units = Unit.from_unit(unit) @property def attributes(self) -> LimitedAttributeDict: diff --git a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py new file mode 100644 index 0000000000..b86ebf06d4 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py @@ -0,0 +1,98 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Unit tests for the opt-in FUTURE.date_microseconds behaviour.""" + +import warnings + +import cf_units +import numpy as np +from packaging.version import Version +import pytest + +from iris import FUTURE +from iris.coords import DimCoord +from iris.tests._shared_utils import assert_array_equal + +cf_units_legacy = Version(cf_units.__version__) < Version("3.3.0") + + +@pytest.fixture( + params=[0, 1000, 500000], + ids=["no_microseconds", "1_millisecond", "half_second"], +) +def time_coord(request) -> tuple[bool, DimCoord]: + points = np.array([0.0, 1.0, 2.0]) + points += request.param / 1e6 + return request.param, DimCoord( + points, + "time", + units="seconds since 1970-01-01 00:00:00", + ) + + +@pytest.fixture( + params=[False, True], + ids=["without_future", "with_future"], +) +def future_date_microseconds(request): + FUTURE.date_microseconds = request.param + yield request.param + FUTURE.date_microseconds = False + + +def test_warning(time_coord, future_date_microseconds): + # Warning should be raised whether the coordinate has microseconds or not. + # Want users to be aware, and opt-in, as early as possible. 
+ n_microseconds, coord = time_coord + + def _op(): + _ = coord.units.num2date(coord.points) + + if future_date_microseconds: + with warnings.catch_warnings(): + warnings.simplefilter("error", FutureWarning) + _op() + else: + with pytest.warns(FutureWarning): + _op() + + +@pytest.mark.parametrize( + "indexing", + (np.s_[0], np.s_[:], np.s_[:, np.newaxis]), + ids=("single", "array", "array_2d"), +) +def test_num2date(time_coord, future_date_microseconds, indexing): + n_microseconds, coord = time_coord + result = coord.units.num2date(coord.points[indexing]) + + if indexing == np.s_[0]: + assert hasattr(result, "microsecond") + # Convert to iterable for more consistency downstream. + result = [result] + else: + assert hasattr(result, "shape") + assert hasattr(result.flatten()[0], "microsecond") + result = result.flatten() + + expected_microseconds = n_microseconds + if not future_date_microseconds or cf_units_legacy: + expected_microseconds = 0 + + result_microseconds = np.array([r.microsecond for r in result]) + assert_array_equal(result_microseconds, expected_microseconds) + + +def test_roundup(time_coord, future_date_microseconds): + n_microseconds, coord = time_coord + result = coord.units.num2date(coord.points) + + expected_seconds = np.floor(coord.points) + if n_microseconds >= 500000 and (not future_date_microseconds or cf_units_legacy): + # Legacy cf-units versions round microseconds and ignore the future flag. + expected_seconds += 1 + + result_seconds = np.array([r.second for r in result]) + assert_array_equal(result_seconds, expected_seconds) From 1cb07a7d9a8ec11cd40165cc72089dd267329a55 Mon Sep 17 00:00:00 2001 From: stephenworsley <49274989+stephenworsley@users.noreply.github.com> Date: Wed, 18 Dec 2024 12:32:35 +0000 Subject: [PATCH 35/74] Pin dask <2024.9 (#6255) * pin dask <2024.9 * update lockfiles * update for pypi * add whatsnew, fix tests * skip failing doctests --- docs/src/whatsnew/latest.rst | 3 +- lib/iris/common/resolve.py | 4 +- requirements/locks/py310-linux-64.lock | 345 ++++++++++++------------- requirements/locks/py311-linux-64.lock | 343 ++++++++++++------------ requirements/locks/py312-linux-64.lock | 343 ++++++++++++------------ requirements/py310.yml | 2 +- requirements/py311.yml | 2 +- requirements/py312.yml | 2 +- requirements/pypi-core.txt | 2 +- 9 files changed, 522 insertions(+), 524 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 52b9853ac2..ae13b8a883 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -71,7 +71,8 @@ This document explains the changes made to Iris for this release 🔗 Dependencies =============== -#. N/A +#. `@stephenworsley`_ pinned dask to <2024.9 due to an indexing bug. 
(:issue:`6251`, + :pull:`6255`) 📚 Documentation diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index c4bc18309b..7a57ef7c2b 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -2592,9 +2592,9 @@ def shape(self): source 'Data from Met Office Unified Model 6.05' >>> Resolve().shape is None True - >>> Resolve(cube1, cube2).shape + >>> Resolve(cube1, cube2).shape # doctest: +SKIP (240, 37, 49) - >>> Resolve(cube2, cube1).shape + >>> Resolve(cube2, cube1).shape # doctest: +SKIP (240, 37, 49) """ # noqa: D214, D406, D407, D410, D411 diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 97972bfa1f..333803d50f 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -1,46 +1,45 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 3a1bed2476064df92c4edecb4c0b462e6b4ecaa37082bd1ee61a2502ff4671a1 +# input_hash: 1da357481d2aa24f523bbf22e71abbbe003a13f04d24d01ab4958bc0d30293b8 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.12.14-hbcca054_0.conda#720523eb0d6a9b0f6120c16b2aa4e7de https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-5_cp310.conda#2921c34715e74b3587b4cff4d36844f9 https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 -https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.5-ha770c72_0.conda#25965c1d1d5fc00ce2b663b73008e3b7 +https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.6-h005c6e1_0.conda#9464e297fa2bf08030c65a54342b48c3 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 -https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda#1ece2ccb1dc8c68639712b05e0fae070 +https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda#cc3573974587f12dda90d96e3e55a702 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_1.conda#38a5cd3be5fb620b48069e27285f1a44 +https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df 
-https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda#2b780c0338fc0ffa678ac82c54af51fd +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.13-hb9d3cd8_0.conda#ae1370588aa6a5157c34c73e9bbb36a0 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda#b422943d5d772b7cc858b36ad2a92db5 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda#59f4c43bb1b5ef1c71946ff2cbf59524 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda#4d638782050ab6faa27275bed57e9b4e +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-hb9d3cd8_1004.conda#bc4cd53a083b6720d61a1519a1900878 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda#7c21106b851ec72c037b162c216d8f05 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda#7ed427f0871fd41cb1d9c17727c17589 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda#6595440079bed734b113de44ffd3cd0a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab 
-https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-h84d6215_0.conda#1190da4988807db89b31e2173128892f +https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc @@ -51,27 +50,32 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_2.conda#85c0dc0bcd110c998b01856975486ee7 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_3.conda#9411c61ff1070b5e065b32840c39faa5 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe -https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hd590300_1.conda#c66f837ac65e4d1cdeb80e2a1d5fcc3d 
+https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf +https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2#6c99772d483f566d59e25037fea2c4b1 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 +https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 @@ -86,25 +90,23 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa -https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda#ee605e794bdc14e2b7f84c4faa0d8c2c +https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.14.1-hac33072_0.conda#cde393f461e0c169d9ffb2fc70f81c33 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda#e8536ec89df2aec5f65fefcf4ccd58ba +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa 
+https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f https://conda.anaconda.org/conda-forge/linux-64/openh264-2.4.1-h59595ed_0.conda#3dfcf61b8e78af08110f5229f79580af https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f -https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.2.1-h5888daf_0.conda#0d9c441855be3d8dfdb2e800fe755059 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b @@ -112,176 +114,183 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda# https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda#ad748ccca349aec3e91743e08b5e2b50 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda#0b666058a179b744a622d0a4a0c56353 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 -https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda#80a57756c545ad11f9847835aa21e6b2 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 +https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c 
https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 -https://conda.anaconda.org/conda-forge/linux-64/python-3.10.15-h4a871b0_2_cpython.conda#98059097f62e97be9aed7ec904055825 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_3.conda#dd9da69dd4c2bf798c0b8bd4786cafb5 +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.16-he725a3c_1_cpython.conda#b887811a901b3aa622a92caf03bc8917 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda#a7a49a8b85122b49214798321e2e96b4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hb9d3cd8_2.conda#d8602724ac0d276c380b97e9eb0f814b -https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.3-pyhd8ed1ab_0.conda#ec763b0a58960558ca0ad7255a51a237 -https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.1-hb9d3cd8_0.conda#279b0de5f6ba95457190a1c459a64e31 +https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.4-pyhd8ed1ab_1.conda#296b403617bafa89df4971567af79013 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a +https://conda.anaconda.org/conda-forge/noarch/async-timeout-5.0.1-pyhd8ed1ab_1.conda#5d842988b11a8c3ab57fb70840c83d24 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 +https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hf71b8c6_2.conda#bf502c169c71e3c6ac0d6175addfacc2 
-https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 -https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda#a374efa97290b8799046df7c5ca17164 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_0.conda#4d155b600b63bc6ba89d91fab74238f8 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py310h5b1441d_3.conda#f1dd2d9a5c782683c28918f44ba547a8 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda#fe521c1608280cc2803ebd26dc252212 -https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 -https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8d88f4a2242e6b96f9ecff9a6a05b2f1 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c 
https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py310ha75aee5_0.conda#8aac4068f272b6bdeb0aa0f29d8e516f -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 -https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 -https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 +https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a +https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 -https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_0.conda#faf232274689aa60da5a63e7cc5faeb7 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py310h3788b33_0.conda#4186d9b4d004b0fe0de6aa62496fb48a https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda#6e801c50a40301f6978c53976917b277 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 -https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda#204892bce2e44252b5cf272712f10bdd -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f -https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.3-ha7bfdaf_0.conda#8bd654307c455162668cd66e36494000 +https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 
+https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.5-ha7bfdaf_0.conda#76f3749eda7b24816aacd55b9f31447a https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py310hff52083_2.conda#4e8b2a2851668c8ad4d5360845281be9 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py310h89163eb_0.conda#5415555830a54d9b4a1307e3e9d942c7 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py310h89163eb_1.conda#8ce3f0332fd6de0d737e2911d329523f https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py310h3788b33_0.conda#6b586fb03d84e5bfbb1a8a3d9e2c9b60 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 -https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf -https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py310ha75aee5_2.conda#d38aa9579b7210c646e6faef1aed5bbb +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 +https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_1.conda#04b95993de18684b24bb742ffe0e90a8 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 +https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py310ha75aee5_0.conda#dbc29ca007ac8bd41fb6921c6317740b https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py310ha75aee5_0.conda#a42a2ed94df11c5cfa5348a317e1f197 -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 -https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda#986287f89929b2d629bd6ef6497dc307 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef +https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 
+https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py310ha75aee5_1.conda#8b430470d53744289cb5499bc99a6485 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py310ha75aee5_1.conda#0d4c5c76ae5f5aac6f0be419963a19dd -https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_0.conda#9e57330f431abbb4c88a5f898a4ba223 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e977934e00b355ff55ed154904044727 -https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py310ha75aee5_1.conda#260c9ae4b2d9af7d5cce7b721cba6132 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda#60ce69f73f3e75b21f1c27b1b471320c +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda#b0dd904de08b7db706167240bf37b164 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda#ac944244f1fed2eb49bae07193ae8215 +https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d0ed782a8aaa16ef248e68c06c168d +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py310ha75aee5_0.conda#166d59aab40b9c607b4cc21c03924e9d +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310ha75aee5_1.conda#ee18e67b0bd283f6a75369936451d6ac -https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.2-hb9d3cd8_0.conda#bb2638cd7fbdd980b1cff9a99a6c1fa8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda#7da9007c0582712c4bad4131f89c8372 -https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda#4daaed111c05672ae669f7036ee5bba3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a -https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 -https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 +https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py310h8deb56e_0.conda#1fc24a3196ad5ede2a68148be61894f4 -https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_0.conda#7c2b6931f9b3548ed78478332095c3e9 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.4-py310h89163eb_0.conda#5222543cdb180f0fecc0d4b9f6b4a225 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py310ha75aee5_1.conda#81bbbb02f3664a012ce65c4fa8e8ca35 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py310h89163eb_1.conda#d30cf58ede43135249a18c5926a96d3f -https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 -https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py310h89163eb_0.conda#02795aff079fa439dbc85b4e19f9a122 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py310ha75aee5_0.conda#d0be1adaa04a03aed745f3d02afb59ce 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py310h89163eb_0.conda#edd1be5d8c667f5e53667433e84efabc +https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_101.conda#09967792ea2191a0bdb461f9c889e510 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.3-default_hb5137d0_0.conda#311e6a1d041db3d6a8a8437750d4234f -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.3-default_h9c6a7e4_0.conda#b8a8cd77810b20754f358f2327812552 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.5-default_hb5137d0_0.conda#ec8649c89988d8a443c252c20f259b72 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.5-default_h9c6a7e4_0.conda#a3a5997b6b47373f0c1608d8503eb4e6 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py310h5851e9f_0.conda#b36342af1ea0eb44bb6ccdefcb9d80d7 +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py310hfeaa1f3_0.conda#1947280342c7259b82a707e38ebc212e -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 -https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 
-https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f +https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 -https://conda.anaconda.org/conda-forge/noarch/async-timeout-4.0.3-pyhd8ed1ab_0.conda#3ce482ec3066e6d809dbbb1d1679f215 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda#2a92e152208121afadf85a5e1f3a5f4d -https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_1.conda#4809b9f4c6ce106d443c3f90b8e10db2 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py310hf462985_1.conda#c2d5289e6cbcecf2c549e01772fe5274 +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py310h3788b33_0.conda#f993b13665fc2bb262b30217c815d137 +https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d +https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h2564987_115.conda#c5ce70b76c77a6c9a3107be8d8e8ab0b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.4.0-hac27bb2_2.conda#ba5ac0bb9ec5aec38dec37c230b12d64 -https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda#392cae2a58fbcb9db8c2147c6d6d1620 -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py310h89163eb_1.conda#4e13be3228db4b8e1349483e821b6046 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.2-py310hd6e36ab_0.conda#d64ac80cd7861e079770982204d4673b -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_0.conda#5a166b998fd17cdaaaadaccdd71a363f +https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py310ha75aee5_1.conda#48781b625a5c7701e04d222752cb2f62 
+https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py310h89163eb_2.conda#51e1600159ad5bc732f761a11c285189 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py310h5eaa309_1.conda#e67778e1cac3bca3b3300f6164f7ffb9 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py310hf462985_2.conda#79f0b0f4ddfa86d17b061bab22533af1 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py310h2e9f774_0.conda#42a3ea3c283d930ae6d156b97ffe4740 -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c -https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_0.conda#ba9f7f0ec4f2a18de3e7bce67c4a431e -https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.13.0-h94b29a5_0.conda#4431bd4ace17dd09b97caf68509b016b +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py310hf462985_0.conda#4c441eff2be2e65bd67765c5642051c5 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py310hfcf56fc_2.conda#b5d548b2a7cf8d0c74fc6c4bf42d1ca5 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py310had3dfd6_2.conda#a4166b41e54d22e794859641b7cae2d0 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310h3788b33_5.conda#e05b0475166b68c9dc4d7937e0315654 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py310ha39cb0e_1.conda#f49de34fb99934bf49ab330b5caffd64 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py310hf462985_1.conda#c2d5289e6cbcecf2c549e01772fe5274 -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py310h3788b33_2.conda#de92ea39a4d3afe19b6ee56701ebfa05 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda#7823092a3cf14e98a52d2a2875c47c80 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py310hf462985_0.conda#c31938674e4cda43617a4d70f99ffd0c +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda#43f629202f9eec21be5f71171fb5daf8 -https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_1.conda#ec6f70b8a5242936567d4f886726a372 
+https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_1.conda#af684ea869a37193a5c116a9aabf659a +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.4.0-h3f63f65_2.conda#00a6127960a3f41d4bfcabd35d5fbeec @@ -295,48 +304,38 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-202 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.4.0-h6481b9d_2.conda#12bf831b85f17368bc71a26ac93a8493 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.4.0-h5888daf_2.conda#d48c774c40ea2047adbff043e9076e7a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py310ha75aee5_1.conda#48781b625a5c7701e04d222752cb2f62 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_107.conda#5bd5042289ef82196bae48948314cdf9 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py310h5eaa309_1.conda#e67778e1cac3bca3b3300f6164f7ffb9 -https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py310hf462985_1.conda#4f1c137b6ea5e8c7ce95c28b053843cc -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py310hf462985_2.conda#bb603a9ffd8f7e97c93c74a5e3c3febd +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py310h68603db_0.conda#409498230a11a71578ed49d006165249 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py310h5146f0f_101.conda#3e17df8e4192431b85afc89a539f6c28 https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py310hfcf56fc_1.conda#d9b1b75a227dbc42f3fe0e8bc852b805 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py310had3dfd6_2.conda#a4166b41e54d22e794859641b7cae2d0 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py310ha75aee5_0.conda#f0734f65184577c08c9f1ba92cd9f57f -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py310h89163eb_0.conda#cdc075f4328556adf4dde97b4f4a0532 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py310hf462985_6.conda#b8ad2d561f4e0db4f09d06cc0e73e0b0 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_0.conda#d5ee837e9e21dabb505a010c6a196fa6 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_h8657690_705.conda#bba34ade586dc53222d5e0387f7733c2 
-https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py310h68603db_1.conda#989ef368e7b81b6c28e608e651a2a7d8 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py310h5146f0f_100.conda#82a474392c02d35a969aaad569cbbca8 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310hf462985_3.conda#7fd2a4e83e8ff3a760984300dad6297c -https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py310ha75aee5_0.conda#4d5fc35ca762815c1b159b710cb22897 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py310h89163eb_0.conda#88ec741ee5b8132d2824dd0034c2f67c https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py310h5eaa309_0.conda#ca4d935c1715f95b6e86846ad1675a2b -https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_0.conda#74674b93806167c26da4eca7613bc225 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310hf462985_3.conda#7fd2a4e83e8ff3a760984300dad6297c +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py310h9617cfe_209.conda#1989896d5ae944eced08372bda5676a5 https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py310hc8241c7_209.conda#063eb6107225478aa00f283b102f3ec8 https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py310he5e186c_209.conda#03fd79331809ea4812c5430e47c04723 
-https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.1-pyhd8ed1ab_0.conda#0731b45087c0358ca8b7d9fe855dec1a +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_0.conda#4b12a3321889056bf9a000be9a0763b3 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 -https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e - +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 1304a69343..06b6647897 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -1,46 +1,45 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 35b01abef89f7af6fc6928cd3e505497158c7209f4f0efed8da35047fdabdc86 +# input_hash: 39a923508b8f95686d14f6c04203885be342684e0fbae7d1a2d14233574bafb7 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.12.14-hbcca054_0.conda#720523eb0d6a9b0f6120c16b2aa4e7de https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-5_cp311.conda#139a8d40c8a2f430df31048949e450de https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 -https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.5-ha770c72_0.conda#25965c1d1d5fc00ce2b663b73008e3b7 +https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.6-h005c6e1_0.conda#9464e297fa2bf08030c65a54342b48c3 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 -https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda#1ece2ccb1dc8c68639712b05e0fae070 +https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda#cc3573974587f12dda90d96e3e55a702 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_1.conda#38a5cd3be5fb620b48069e27285f1a44 +https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda#2b780c0338fc0ffa678ac82c54af51fd +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.13-hb9d3cd8_0.conda#ae1370588aa6a5157c34c73e9bbb36a0 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda#b422943d5d772b7cc858b36ad2a92db5 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda#59f4c43bb1b5ef1c71946ff2cbf59524 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db 
+https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda#4d638782050ab6faa27275bed57e9b4e +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-hb9d3cd8_1004.conda#bc4cd53a083b6720d61a1519a1900878 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda#7c21106b851ec72c037b162c216d8f05 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda#7ed427f0871fd41cb1d9c17727c17589 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda#6595440079bed734b113de44ffd3cd0a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab -https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-h84d6215_0.conda#1190da4988807db89b31e2173128892f +https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc @@ -51,27 +50,32 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_2.conda#85c0dc0bcd110c998b01856975486ee7 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_3.conda#9411c61ff1070b5e065b32840c39faa5 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe -https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hd590300_1.conda#c66f837ac65e4d1cdeb80e2a1d5fcc3d +https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf +https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2#6c99772d483f566d59e25037fea2c4b1 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 
+https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 +https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 @@ -86,25 +90,23 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa -https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda#ee605e794bdc14e2b7f84c4faa0d8c2c +https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.14.1-hac33072_0.conda#cde393f461e0c169d9ffb2fc70f81c33 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda#e8536ec89df2aec5f65fefcf4ccd58ba +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa +https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f https://conda.anaconda.org/conda-forge/linux-64/openh264-2.4.1-h59595ed_0.conda#3dfcf61b8e78af08110f5229f79580af https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f 
-https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.2.1-h5888daf_0.conda#0d9c441855be3d8dfdb2e800fe755059 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b @@ -112,177 +114,184 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda# https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda#ad748ccca349aec3e91743e08b5e2b50 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda#0b666058a179b744a622d0a4a0c56353 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 -https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda#80a57756c545ad11f9847835aa21e6b2 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 +https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.10-hc5c86c4_3_cpython.conda#9e1ad55c87368e662177661a998feed5 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de 
+https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_3.conda#dd9da69dd4c2bf798c0b8bd4786cafb5 +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.11-h9e4cc4f_1_cpython.conda#8387070aa413ce9a8cc35a509fae938b +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda#a7a49a8b85122b49214798321e2e96b4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hb9d3cd8_2.conda#d8602724ac0d276c380b97e9eb0f814b -https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.3-pyhd8ed1ab_0.conda#ec763b0a58960558ca0ad7255a51a237 -https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.1-hb9d3cd8_0.conda#279b0de5f6ba95457190a1c459a64e31 +https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.4-pyhd8ed1ab_1.conda#296b403617bafa89df4971567af79013 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 +https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hfdbb021_2.conda#d21daab070d76490cb39a8f1d1729d79 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 -https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda#a374efa97290b8799046df7c5ca17164 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e 
+https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_0.conda#4d155b600b63bc6ba89d91fab74238f8 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py311h55d416d_3.conda#d21db006755203fe890596d3eae992ce https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda#fe521c1608280cc2803ebd26dc252212 -https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 -https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8d88f4a2242e6b96f9ecff9a6a05b2f1 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py311h9ecbd09_0.conda#75424a18fb275a18b288c099b869c3bc -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 -https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 -https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 
+https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a +https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 -https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_0.conda#faf232274689aa60da5a63e7cc5faeb7 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py311hd18a35c_0.conda#be34c90cce87090d24da64a7c239ca96 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda#6e801c50a40301f6978c53976917b277 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 -https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda#204892bce2e44252b5cf272712f10bdd -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f -https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.3-ha7bfdaf_0.conda#8bd654307c455162668cd66e36494000 +https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.5-ha7bfdaf_0.conda#76f3749eda7b24816aacd55b9f31447a https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py311h38be061_2.conda#733b481d20ff260a34f2b0003ff4fbb3 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py311h2dc5d0c_0.conda#15e4dadd59e93baad7275249f10b9472 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py311h2dc5d0c_1.conda#6565a715337ae279e351d0abd8ffe88a https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py311hd18a35c_0.conda#682f76920687f7d9283039eb542fdacf -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py311h2dc5d0c_1.conda#5384f857bd8b0fc3a62ce1ece858c89f 
+https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py311h2dc5d0c_2.conda#bb8ca118919836624d920b4c44383a15 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 -https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf -https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py311h9ecbd09_2.conda#85a56dd3b692fb5435de1e901354b5b8 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 +https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_1.conda#04b95993de18684b24bb742ffe0e90a8 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 +https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py311h9ecbd09_0.conda#20d1c4ad24ac50f0941c63e81e4a86b7 https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py311h9ecbd09_0.conda#0ffc1f53106a38f059b151c465891ed3 -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 -https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda#986287f89929b2d629bd6ef6497dc307 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef +https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py311h9ecbd09_1.conda#b1796d741ca619dbacb79917b20e5a05 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py311h9ecbd09_1.conda#abeb54d40f439b86f75ea57045ab8496 -https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_0.conda#9e57330f431abbb4c88a5f898a4ba223 
-https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e977934e00b355ff55ed154904044727 -https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py311h9ecbd09_1.conda#616fed0b6f5c925250be779b05d1d7f7 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda#60ce69f73f3e75b21f1c27b1b471320c +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda#b0dd904de08b7db706167240bf37b164 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda#ac944244f1fed2eb49bae07193ae8215 +https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d0ed782a8aaa16ef248e68c06c168d +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py311h9ecbd09_0.conda#df3aee9c3e44489257a840b8354e77b9 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py311h9ecbd09_1.conda#00895577e2b4c24dca76675ab1862551 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.2-hb9d3cd8_0.conda#bb2638cd7fbdd980b1cff9a99a6c1fa8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda#7da9007c0582712c4bad4131f89c8372 -https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda#4daaed111c05672ae669f7036ee5bba3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a -https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 -https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 +https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py311hf29c0ef_0.conda#55553ecd5328336368db611f350b7039 -https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_0.conda#7c2b6931f9b3548ed78478332095c3e9 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.4-py311h2dc5d0c_0.conda#4d74dedf541d0f87fce0b5797b66e425 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py311h9ecbd09_1.conda#765c19c0b6df9c143ac8f959d1a1a238 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py311h2dc5d0c_1.conda#7336fc1b2ead4cbdda1268dd6b7a6c38 -https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 -https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py311h2dc5d0c_0.conda#098c90e7d8761167e0f54ed6f81ee2f0 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py311h9ecbd09_0.conda#69a0a85acdcc5e6d0f1cc915c067ad4c +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py311h2dc5d0c_0.conda#27bc755bed4972c51f4d2789f2cde56c +https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_101.conda#09967792ea2191a0bdb461f9c889e510 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 
-https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.3-default_hb5137d0_0.conda#311e6a1d041db3d6a8a8437750d4234f -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.3-default_h9c6a7e4_0.conda#b8a8cd77810b20754f358f2327812552 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.5-default_hb5137d0_0.conda#ec8649c89988d8a443c252c20f259b72 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.5-default_h9c6a7e4_0.conda#a3a5997b6b47373f0c1608d8503eb4e6 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py311hf916aec_0.conda#82c097817ff68e7b6f5db63cdcb593d2 +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py311h49e9ac3_0.conda#2bd3d0f839ec0d1eaca817c9d1feb7c2 -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 -https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f +https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py311h9ecbd09_0.conda#d9c23163e7ac5f8926372c7d792a996f -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py311h2dc5d0c_0.conda#4f0fa0019a6e7be77db3609a707a4581 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py311h9ecbd09_0.conda#385d54815a5d2e74e68374d77446030b +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py311h2dc5d0c_0.conda#7ddc4f7d7120a103af3e06cf7f7e7fb1 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda#2a92e152208121afadf85a5e1f3a5f4d -https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_1.conda#4809b9f4c6ce106d443c3f90b8e10db2 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py311h9f3472d_1.conda#2c3c4f115d28ed9e001a271d5d8585aa +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py311hd18a35c_0.conda#351cb68d2081e249069748b6e60b3cd2 +https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d +https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h2564987_115.conda#c5ce70b76c77a6c9a3107be8d8e8ab0b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.4.0-hac27bb2_2.conda#ba5ac0bb9ec5aec38dec37c230b12d64 -https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda#392cae2a58fbcb9db8c2147c6d6d1620 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.2-py311h71ddf71_0.conda#4e72b55892331ada8fbcf5954df582f2 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_0.conda#5a166b998fd17cdaaaadaccdd71a363f +https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py311h9ecbd09_1.conda#28d6b63784b350a2906dc264ad8c7f2a +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py311h7db5c69_1.conda#643f8cb35133eb1be4919fb953f0a25f +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py311h9f3472d_2.conda#72b6f1a496a67977b772b53fae55308a https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py311h0f98d5a_0.conda#22531205a97c116251713008d65dfefd 
-https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c -https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_0.conda#ba9f7f0ec4f2a18de3e7bce67c4a431e -https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.13.0-h94b29a5_0.conda#4431bd4ace17dd09b97caf68509b016b +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py311h9f3472d_0.conda#17334e5c12abdf2db6b25bd4187cd3e4 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py311he9a78e4_2.conda#c4aee8cadc4c9fc9a91aca0803473690 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py311h2fdb869_2.conda#4c78235905053663d1c9e23df3f11b65 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311hd18a35c_5.conda#4e8447ca8558a203ec0577b4730073f3 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py311hbc35293_1.conda#aec590674ba365e50ae83aa2d6e1efae -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py311h9f3472d_1.conda#2c3c4f115d28ed9e001a271d5d8585aa -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py311hd18a35c_2.conda#66266cd4f20e47dc1de458c93fb4d2a9 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda#7823092a3cf14e98a52d2a2875c47c80 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py311h9f3472d_0.conda#555b148cafbd96b658499060d5e11a65 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda#43f629202f9eec21be5f71171fb5daf8 -https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_1.conda#ec6f70b8a5242936567d4f886726a372 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_1.conda#af684ea869a37193a5c116a9aabf659a +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce 
https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.4.0-h3f63f65_2.conda#00a6127960a3f41d4bfcabd35d5fbeec @@ -296,46 +305,36 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-202 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.4.0-h6481b9d_2.conda#12bf831b85f17368bc71a26ac93a8493 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.4.0-h5888daf_2.conda#d48c774c40ea2047adbff043e9076e7a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py311h9ecbd09_1.conda#28d6b63784b350a2906dc264ad8c7f2a -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_107.conda#5bd5042289ef82196bae48948314cdf9 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py311h7db5c69_1.conda#643f8cb35133eb1be4919fb953f0a25f -https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py311h9f3472d_1.conda#87b04d34d110ea5ff945f1949b7436be -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py311h9f3472d_2.conda#bd9c3ff46028eec017bc78377f9e0fb6 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py311h2b939e6_0.conda#79239585ea50c427415ef629534bb3aa +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py311h7c29e4f_101.conda#d966f11d28c699da7e9de2aa2f323a4f https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py311he9a78e4_1.conda#49ba89bf4d8a995efb99517d1c7aeb1e -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py311h2fdb869_2.conda#4c78235905053663d1c9e23df3f11b65 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_0.conda#74674b93806167c26da4eca7613bc225 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h9f3472d_6.conda#ac7dc7f70f8d2c1d96ecb7e4cb196498 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_0.conda#d5ee837e9e21dabb505a010c6a196fa6 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_h8657690_705.conda#bba34ade586dc53222d5e0387f7733c2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py311h7db5c69_0.conda#20ba399d57a2b5de789a5b24341481a1 +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py311h2b939e6_1.conda#db431da3476c884ef08d9f42a32913b6 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py311h7c29e4f_100.conda#11395670c4eeda7a60c13c313a83727f -https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h9f3472d_3.conda#a7c4169b1c920361597ddacb461350fd -https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py311h7158b74_209.conda#011801a68c022cf9692a4567d84678ca -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py311h7db5c69_0.conda#20ba399d57a2b5de789a5b24341481a1 -https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py311hc8241c7_209.conda#13fdaae5c7c5c76089ca76f63b287ef5 https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py311he5e186c_209.conda#54a9526336ff06739344f87726cbc61e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.1-pyhd8ed1ab_0.conda#0731b45087c0358ca8b7d9fe855dec1a +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_0.conda#4b12a3321889056bf9a000be9a0763b3 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 -https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e - +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index cbec79a901..f101e6b409 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -1,46 +1,45 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 8cb273c57f190b95e7db1b3aae01b38ca08c48334bd8a71035d82f412ddd84bc +# input_hash: 989d858ad22ed9fe27cc23f25fd7ad423d1250d679d35944ae71177ccc27a44e @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.12.14-hbcca054_0.conda#720523eb0d6a9b0f6120c16b2aa4e7de https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 -https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.5-ha770c72_0.conda#25965c1d1d5fc00ce2b663b73008e3b7 +https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.6-h005c6e1_0.conda#9464e297fa2bf08030c65a54342b48c3 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 -https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_1.conda#1ece2ccb1dc8c68639712b05e0fae070 +https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda#cc3573974587f12dda90d96e3e55a702 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_1.conda#38a5cd3be5fb620b48069e27285f1a44 +https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda#2b780c0338fc0ffa678ac82c54af51fd +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.13-hb9d3cd8_0.conda#ae1370588aa6a5157c34c73e9bbb36a0 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda#b422943d5d772b7cc858b36ad2a92db5 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda#59f4c43bb1b5ef1c71946ff2cbf59524 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db 
+https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda#4d638782050ab6faa27275bed57e9b4e +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-hb9d3cd8_1004.conda#bc4cd53a083b6720d61a1519a1900878 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda#7c21106b851ec72c037b162c216d8f05 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.12-h4ab18f5_0.conda#7ed427f0871fd41cb1d9c17727c17589 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda#6595440079bed734b113de44ffd3cd0a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab -https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-h84d6215_0.conda#1190da4988807db89b31e2173128892f +https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc @@ -51,27 +50,32 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_2.conda#85c0dc0bcd110c998b01856975486ee7 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_3.conda#9411c61ff1070b5e065b32840c39faa5 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe -https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hd590300_1.conda#c66f837ac65e4d1cdeb80e2a1d5fcc3d +https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf +https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2#6c99772d483f566d59e25037fea2c4b1 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 
+https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 +https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 @@ -86,25 +90,23 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa -https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda#ee605e794bdc14e2b7f84c4faa0d8c2c +https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.14.1-hac33072_0.conda#cde393f461e0c169d9ffb2fc70f81c33 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda#e8536ec89df2aec5f65fefcf4ccd58ba +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa +https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f https://conda.anaconda.org/conda-forge/linux-64/openh264-2.4.1-h59595ed_0.conda#3dfcf61b8e78af08110f5229f79580af https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f 
-https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.2.1-h5888daf_0.conda#0d9c441855be3d8dfdb2e800fe755059 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b @@ -112,177 +114,184 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda# https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda#ad748ccca349aec3e91743e08b5e2b50 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda#0b666058a179b744a622d0a4a0c56353 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 -https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_1.conda#80a57756c545ad11f9847835aa21e6b2 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 +https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_2.conda#57a9e7ee3c0840d3c8c9012473978629 -https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de 
+https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_3.conda#dd9da69dd4c2bf798c0b8bd4786cafb5 +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.8-h9e4cc4f_1_cpython.conda#7fd2fd79436d9b473812f14e86746844 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda#a7a49a8b85122b49214798321e2e96b4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hb9d3cd8_2.conda#d8602724ac0d276c380b97e9eb0f814b -https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.3-pyhd8ed1ab_0.conda#ec763b0a58960558ca0ad7255a51a237 -https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.1-hb9d3cd8_0.conda#279b0de5f6ba95457190a1c459a64e31 +https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.4-pyhd8ed1ab_1.conda#296b403617bafa89df4971567af79013 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 +https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda#b0b867af6fc74b2a0aa206da29c0f3cf -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 -https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda#a374efa97290b8799046df7c5ca17164 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e 
+https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_0.conda#4d155b600b63bc6ba89d91fab74238f8 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py312h8fd2918_3.conda#21e433caf1bb1e4c95832f8bb731d64c https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda#fe521c1608280cc2803ebd26dc252212 -https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 -https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8d88f4a2242e6b96f9ecff9a6a05b2f1 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 -https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 -https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 
+https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a +https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 -https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_0.conda#faf232274689aa60da5a63e7cc5faeb7 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda#6e801c50a40301f6978c53976917b277 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 -https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_1.conda#204892bce2e44252b5cf272712f10bdd -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f -https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.3-ha7bfdaf_0.conda#8bd654307c455162668cd66e36494000 +https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.5-ha7bfdaf_0.conda#76f3749eda7b24816aacd55b9f31447a https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py312h7900ff3_2.conda#fddd3092f921be8e01b18f2a0266d98f -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda#a755704ea0e2503f8c227d84829a8e81 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda#eb227c3e0bf58f5bd69c0532b157975b https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda#5c9b020a3f86799cdc6115e55df06146 -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_1.conda#e397d9b841c37fc3180b73275ce7e990 
+https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_2.conda#5b5e3267d915a107eca793d52e1b780a https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 -https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf -https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py312h66e93f0_2.conda#2c6c0c68f310bc33972e7c83264d7786 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 +https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_1.conda#04b95993de18684b24bb742ffe0e90a8 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 +https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py312h66e93f0_0.conda#55d5742a696d7da1c1262e99b6217ceb https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py312h66e93f0_0.conda#0524eb91d3d78d76d671c6e3cd7cee82 -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 -https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda#986287f89929b2d629bd6ef6497dc307 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef +https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f0_1.conda#39aed2afe4d0cf76ab3d6b09eecdbea7 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 -https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_0.conda#9e57330f431abbb4c88a5f898a4ba223 
-https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e977934e00b355ff55ed154904044727 -https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda#af648b62462794649066366af4ecd5b0 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda#60ce69f73f3e75b21f1c27b1b471320c +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda#b0dd904de08b7db706167240bf37b164 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda#ac944244f1fed2eb49bae07193ae8215 +https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d0ed782a8aaa16ef248e68c06c168d +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda#e417822cb989e80a0d2b1b576fdd1657 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.2-hb9d3cd8_0.conda#bb2638cd7fbdd980b1cff9a99a6c1fa8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda#7da9007c0582712c4bad4131f89c8372 -https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda#4daaed111c05672ae669f7036ee5bba3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a -https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 -https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 +https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 -https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_0.conda#7c2b6931f9b3548ed78478332095c3e9 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.4-py312h178313f_0.conda#a32fbd2322865ac80c7db74c553f5306 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda#a921e2fe122e7f38417b9b17c7a13343 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h178313f_1.conda#bbbf5fa5cab622c33907bc8d7eeea9f7 -https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 -https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py312h178313f_0.conda#a6a5f52f8260983b0aaeebcebf558a3e +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda#6198b134b1c08173f33653896974d477 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py312h178313f_0.conda#968104bfe69e21fadeb30edd9c3785f9 +https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_101.conda#09967792ea2191a0bdb461f9c889e510 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 
-https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.3-default_hb5137d0_0.conda#311e6a1d041db3d6a8a8437750d4234f -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.3-default_h9c6a7e4_0.conda#b8a8cd77810b20754f358f2327812552 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.5-default_hb5137d0_0.conda#ec8649c89988d8a443c252c20f259b72 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.5-default_h9c6a7e4_0.conda#a3a5997b6b47373f0c1608d8503eb4e6 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py312h7e784f5_0.conda#c9e9a81299192e77428f40711a4fb00d +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 -https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f +https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py312h178313f_0.conda#3e92784b8e32ab7d0b95ee296ba79a99 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda#2a92e152208121afadf85a5e1f3a5f4d -https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_1.conda#4809b9f4c6ce106d443c3f90b8e10db2 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda#f5fbba0394ee45e9a64a73c2a994126a +https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d +https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h2564987_115.conda#c5ce70b76c77a6c9a3107be8d8e8ab0b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.4.0-hac27bb2_2.conda#ba5ac0bb9ec5aec38dec37c230b12d64 -https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda#392cae2a58fbcb9db8c2147c6d6d1620 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.2-py312h58c1407_0.conda#b7e9a46277a1ee0afc6311e7760df0c3 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_0.conda#5a166b998fd17cdaaaadaccdd71a363f +https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f 
-https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c -https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 -https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_0.conda#ba9f7f0ec4f2a18de3e7bce67c4a431e -https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2021.13.0-h94b29a5_0.conda#4431bd4ace17dd09b97caf68509b016b +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0.conda#3f62987017ad18e9e7dadce9899de9ef +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_2.conda#94688dd449f6c092e5f951780235aca1 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda#ff28f374b31937c048107521c814791e -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda#7823092a3cf14e98a52d2a2875c47c80 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda#43f629202f9eec21be5f71171fb5daf8 -https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_1.conda#ec6f70b8a5242936567d4f886726a372 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_1.conda#af684ea869a37193a5c116a9aabf659a +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce 
https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.4.0-h3f63f65_2.conda#00a6127960a3f41d4bfcabd35d5fbeec @@ -296,46 +305,36 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-202 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.4.0-h6481b9d_2.conda#12bf831b85f17368bc71a26ac93a8493 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.4.0-h5888daf_2.conda#d48c774c40ea2047adbff043e9076e7a https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_107.conda#5bd5042289ef82196bae48948314cdf9 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b -https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_1.conda#b4fa8eafe923ac2733001fef8531026f -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py312hc0a28a1_2.conda#8300d634adec4a6aed35a87e90e9cb07 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda#c27a17a8c54c0d35cf83bbc0de8f7f77 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda#7e41ca6012a6bf609539aec0dfee93f7 https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda#b43233a9e2f62fb94affe5607ea79473 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_0.conda#74674b93806167c26da4eca7613bc225 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py312hc0a28a1_6.conda#fa4853d25b6fbfef5eb7b3e1b5616dd5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_0.conda#d5ee837e9e21dabb505a010c6a196fa6 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_h8657690_705.conda#bba34ade586dc53222d5e0387f7733c2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda#2f4f3854f23be30de29e9e4d39758349 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_100.conda#8e932f27c4339835563f42d73b158d53 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 -https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py312hc73667e_209.conda#e2967eddf4ea06a8b645da9967f370be -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed -https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py312hc8241c7_209.conda#1354402d09a8614821d6d3c13d826863 https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py312he5e186c_209.conda#c6aba64b606a07b20b345b1e4146494b -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.1-pyhd8ed1ab_0.conda#0731b45087c0358ca8b7d9fe855dec1a +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_0.conda#4b12a3321889056bf9a000be9a0763b3 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 -https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e - +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/py310.yml b/requirements/py310.yml index d81d4c0d42..cbef5ce676 100644 --- a/requirements/py310.yml +++ b/requirements/py310.yml @@ -14,7 +14,7 @@ dependencies: - cartopy >=0.21 - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2022.9.0,!=2024.8.0 + - dask-core >=2022.9.0,!=2024.8.0, <2024.9 - libnetcdf !=4.9.1 - matplotlib-base >=3.5, !=3.9.1 - netcdf4 diff --git a/requirements/py311.yml b/requirements/py311.yml index b12c46c87f..ddd6d65ad4 100644 --- a/requirements/py311.yml +++ b/requirements/py311.yml @@ -14,7 +14,7 @@ dependencies: - cartopy >=0.21 - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2022.9.0,!=2024.8.0 + - dask-core >=2022.9.0,!=2024.8.0, <2024.9 - libnetcdf !=4.9.1 - matplotlib-base >=3.5, !=3.9.1 - netcdf4 diff --git a/requirements/py312.yml b/requirements/py312.yml index 74277e417f..9f6b9b5068 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ -14,7 +14,7 @@ dependencies: - cartopy >=0.21 - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2022.9.0,!=2024.8.0 + - dask-core >=2022.9.0,!=2024.8.0, <2024.9 - libnetcdf !=4.9.1 - matplotlib-base >=3.5, !=3.9.1 - netcdf4 diff --git a/requirements/pypi-core.txt b/requirements/pypi-core.txt index 208ef7f413..041ac8a9e4 100644 --- a/requirements/pypi-core.txt +++ b/requirements/pypi-core.txt @@ -1,7 +1,7 @@ cartopy>=0.21 cf-units>=3.1 cftime>=1.5.0 -dask[array]>=2022.9.0,!=2024.8.0 +dask[array]>=2022.9.0,!=2024.8.0, <2024.9 # libnetcdf!=4.9.1 (not available on PyPI) matplotlib>=3.5 netcdf4 From 68f66fe43701cbb8822f903a980e7ce8548b0c13 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Dec 2024 06:11:47 +0000 Subject: [PATCH 36/74] Bump scitools/workflows from 2024.12.0 to 2024.12.3 (#6269) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.12.0 to 2024.12.3. 
- [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.12.0...2024.12.3) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index ad0fa50d57..f68084546d 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.3 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 046404c1fe..1a5472a26f 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.3 secrets: inherit From 80bcec7e69cc064360dcf807ebb35a6b38892ed3 Mon Sep 17 00:00:00 2001 From: stephenworsley <49274989+stephenworsley@users.noreply.github.com> Date: Wed, 18 Dec 2024 12:32:35 +0000 Subject: [PATCH 37/74] mrePin dask <2024.9 (#6255) in dask <2024.9 * update lockfiles * update for pypi * add whatsnew, fix tests * skip failing doctests merge conflicts --- requirements/locks/py310-linux-64.lock | 2 +- requirements/locks/py311-linux-64.lock | 2 +- requirements/locks/py312-linux-64.lock | 18 +++++++++--------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 333803d50f..e8db96dbc9 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -120,7 +120,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae1 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 06b6647897..48fb98e43d 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -120,7 +120,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae1 
https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index f101e6b409..7dd8497d8b 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -120,7 +120,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae1 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 @@ -250,12 +250,13 @@ https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda# https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf -https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f -https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e -https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede +https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 
+https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c +https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py312h178313f_0.conda#3e92784b8e32ab7d0b95ee296ba79a99 @@ -333,8 +334,7 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https:///conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 From 99bc2d7d6734127cf0428f72de2dc5788a001939 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 19 Dec 2024 13:43:56 +0000 Subject: [PATCH 38/74] removed use of shared utils --- lib/iris/tests/unit/common/metadata/test_microsecond_future.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py index b86ebf06d4..0e83e5c592 100644 --- a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py +++ b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py @@ -8,12 +8,12 @@ import cf_units import numpy as np +from numpy.testing import assert_array_equal from packaging.version import Version import pytest from iris import FUTURE from iris.coords import DimCoord -from iris.tests._shared_utils import assert_array_equal cf_units_legacy = Version(cf_units.__version__) < Version("3.3.0") From fed1e40be7ff864f62f4aa60b313ef160420d9fc Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Fri, 20 Dec 2024 12:19:48 +0000 Subject: [PATCH 39/74] edited whatsnew --- docs/src/whatsnew/latest.rst | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ae13b8a883..7325630f22 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst 
@@ -30,13 +30,7 @@ This document explains the changes made to Iris for this release
 ✨ Features
 ===========
 
-#. `@trexfeathers`_ added a new :class:`~iris.Future` flag -
-   ``date_microseconds`` - which sets whether Iris should use the new
-   microsecond-precision units (see :class:`cf_units.Unit`, microseconds
-   introduced in version 3.3) when the unit
-   is a time unit. The previous maximum precision was seconds. You should check
-   your code for new floating point problems if activating this (e.g. when
-   using the :class:`~iris.Constraint` API). (:pull:`6260`)
+#. N/A
 
 
 🐛 Bugs Fixed
@@ -56,10 +50,7 @@ This document explains the changes made to Iris for this release
 🚀 Performance Enhancements
 ===========================
 
-#. Note that due to the new ``date_microseconds`` :class:`~iris.Future` flag,
-   the time coordinate categorisation speedup introduced in
-   :doc:`/whatsnew/3.11` will only be available when
-   ``iris.FUTURE.date_microseconds == True``.
+#. N/A
 
 
 🔥 Deprecations
@@ -71,8 +62,7 @@ This document explains the changes made to Iris for this release
 🔗 Dependencies
 ===============
 
-#. `@stephenworsley`_ pinned dask to <2024.9 due to an indexing bug. (:issue:`6251`,
-   :pull:`6255`)
+#. N/A
 
 
 📚 Documentation

From d388bdcc97f19787f06beefb864b4392b1005073 Mon Sep 17 00:00:00 2001
From: Elias <110238618+ESadek-MO@users.noreply.github.com>
Date: Thu, 19 Dec 2024 15:27:27 +0000
Subject: [PATCH 40/74] What's new updates for v3.11.1 . (#6271)

---
 docs/src/whatsnew/3.11.rst | 29 +++++++++++++++++++++++++++++
 1 file changed, 29 insertions(+)

diff --git a/docs/src/whatsnew/3.11.rst b/docs/src/whatsnew/3.11.rst
index 6aba0a5708..abbb486b70 100644
--- a/docs/src/whatsnew/3.11.rst
+++ b/docs/src/whatsnew/3.11.rst
@@ -34,6 +34,20 @@ This document explains the changes made to Iris for this release
 And finally, get in touch with us on :issue:`GitHub` if you have
 any issues or feature requests for improving Iris. Enjoy!
 
+v3.11.1 (19 Dec 2024)
+===========================
+
+.. dropdown:: |iris_version| Patches
+   :color: primary
+   :icon: alert
+   :animate: fade-in
+
+   The patches in this release of Iris include:
+
+   #. We added a :class:`~iris.Future` flag, ``date_microseconds``, which
+      prevents floating point problems arising from :class:`cf_units.Unit` v3.3.
+
+   #. We pinned dask to <2024.9 to avoid an indexing bug.
 
 📢 Announcements
 ================
@@ -73,6 +87,14 @@ This document explains the changes made to Iris for this release
 #. `@ESadek-MO`_ updated to the latest CF Standard Names Table v86
    (5 September 2024). (:pull:`6200`)
 
+#. `@trexfeathers`_ added a new :class:`~iris.Future` flag -
+   ``date_microseconds`` - which sets whether Iris should use the new
+   microsecond-precision units (see :class:`cf_units.Unit`, microseconds
+   introduced in version 3.3) when the unit
+   is a time unit. The previous maximum precision was seconds. You should check
+   your code for new floating point problems if activating this (e.g. when
+   using the :class:`~iris.Constraint` API). (:pull:`6260`)
+
 
 🐛 Bugs Fixed
 =============
@@ -116,6 +138,10 @@ This document explains the changes made to Iris for this release
    the concatenation axis. This issue can be avoided by disabling the
    problematic check. (:pull:`5926` and :pull:`6187`)
 
+#. Note that due to the new ``date_microseconds`` :class:`~iris.Future` flag,
+   the time coordinate categorisation speedup introduced above
+   will only be available when ``iris.FUTURE.date_microseconds == True``.
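+   For example, opting in is a single flag assignment (a minimal sketch;
+   ``iris.FUTURE`` is the run-time feature-flag object named above)::
+
+      import iris
+
+      iris.FUTURE.date_microseconds = True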
+ 🔥 Deprecations =============== @@ -135,6 +161,9 @@ This document explains the changes made to Iris for this release * `NumPy v2 changed scalar printing`_ +#. `@stephenworsley`_ pinned dask to <2024.9 due to an indexing bug. (:issue:`6251`, + :pull:`6255`) + 📚 Documentation ================ From 46d876e548e22a2a5053f6b9dc7d8e295b5abc48 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 20 Dec 2024 13:58:00 +0000 Subject: [PATCH 41/74] Equalise cubes (#6257) * Initial equalise_cubes util. * Initial something working. * Tweaks, improvements, notes. * Initial partial testing * Small tweaks. * Tidy a bit. Test 'unify_time_units'. NB time-units are on coords not cubes. * Fix grouping efficiency. * Review changes: rename vars. * Review changes: rename 'unify_names'. * Review changes: Add warning on null operation. * Review changes: Remove mistaken docstring reference to re-ordering. * Review changes: Simplify in-place replacement of list/array content. * Added whatsnew. * Review changes: explain scrambling; in-place scramble doesn't return result * Added specific test for array attribute handling. * Simplify 'scramble' operation. --- docs/src/whatsnew/latest.rst | 5 + .../tests/unit/util/test_equalise_cubes.py | 313 ++++++++++++++++++ lib/iris/util.py | 137 ++++++++ 3 files changed, 455 insertions(+) create mode 100644 lib/iris/tests/unit/util/test_equalise_cubes.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7325630f22..a69de60f95 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -32,6 +32,11 @@ This document explains the changes made to Iris for this release #. N/A +#. `@pp-mo`_ added a new utility function :func:`~iris.util.equalise_cubes`, to help + with aligning cubes so they can merge / concatenate. + (:issue:`6248`, :pull:`6257`) + + 🐛 Bugs Fixed ============= diff --git a/lib/iris/tests/unit/util/test_equalise_cubes.py b/lib/iris/tests/unit/util/test_equalise_cubes.py new file mode 100644 index 0000000000..5aa0e28c2e --- /dev/null +++ b/lib/iris/tests/unit/util/test_equalise_cubes.py @@ -0,0 +1,313 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Unit tests for the :func:`iris.util.equalise_cubes` function.""" + +import warnings + +from cf_units import Unit +import numpy as np +import pytest + +from iris.coords import DimCoord +from iris.cube import Cube +from iris.util import equalise_cubes +from iris.warnings import IrisUserWarning + + +def _scramble(inputs): + # Reorder items (IN PLACE) to check that order does not affect operation. + # Rather than anything more clever, we'll settle for just reversing the order. 
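+    # Slice assignment mutates the caller's list object in place, rather than
+    # rebinding the local name to a new list.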
+ inputs[:] = inputs[::-1] + + +@pytest.fixture(params=["off", "on", "applyall", "scrambled"]) +def usage(request): + # Fixture to check different usage modes for a given operation control keyword + return request.param + + +def _usage_common(usage, op_keyword_name, test_cubes): + kwargs = {} + if usage == "off": + pass + elif usage in ("on", "scrambled"): + kwargs[op_keyword_name] = True + if usage == "scrambled": + # reorder the input cubes, but in-place + _scramble(test_cubes) + elif usage == "applyall": + kwargs["apply_all"] = True + else: + raise ValueError(f"Unrecognised 'usage' option {usage!r}") + default_expected_metadatas = [cube.metadata for cube in test_cubes] + return kwargs, default_expected_metadatas + + +def _cube( + stdname=None, + longname=None, + varname=None, + units="unknown", + cell_methods=(), + **attributes, +): + # Construct a simple test-cube with given metadata properties. + cube = Cube( + [1], + standard_name=stdname, + long_name=longname, + var_name=varname, + cell_methods=cell_methods, + units=units, + attributes=attributes, + ) + return cube + + +_NO_OP_MESSAGE = "'equalise_cubes' call does nothing" + + +class TestNoOperation: + def test(self): + # A standalone test, that a call with no operations enabled raises a warning + with pytest.warns(IrisUserWarning, match=_NO_OP_MESSAGE): + equalise_cubes([]) + + +class WarnChecked: + @pytest.fixture(autouse=True) + def nowarn(self, usage): + if usage == "off": + with pytest.warns(IrisUserWarning, match=_NO_OP_MESSAGE): + yield + else: + with warnings.catch_warnings(): + warnings.simplefilter("error") + yield + + +class TestUnifyNames(WarnChecked): + # Test the 'normalise_names' operation. + def test_simple(self, usage): + sn = "air_temperature" + stdnames = [sn, sn, sn] + longnames = [None, "long1", "long2"] + varnames = ["var1", None, "var2"] + test_cubes = [ + _cube(stdname=stdname, longname=longname, varname=varname) + for stdname, longname, varname in zip(stdnames, longnames, varnames) + ] + kwargs, expected_metadatas = _usage_common(usage, "normalise_names", test_cubes) + + # Calculate expected results + if usage != "off": + # result cube metadata should all be the same, with no varname + meta = _cube(stdname=sn).metadata + expected_metadatas = [meta, meta, meta] + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.metadata for cube in results] == expected_metadatas + + def test_multi(self, usage): + # Show that different cases are resolved independently + sn1, sn2 = "air_temperature", "air_pressure" + stdnames = [sn1, None, None, None, sn2, None] + longnames = ["long1", "long2", None, None, "long3", None] + varnames = ["var1", None, "var3", "var4", None, None] + test_cubes = [ + _cube(stdname=stdname, longname=longname, varname=varname) + for stdname, longname, varname in zip(stdnames, longnames, varnames) + ] + kwargs, expected_metadatas = _usage_common(usage, "normalise_names", test_cubes) + + # Calculate expected results + if usage != "off": + stdnames = [sn1, None, None, None, sn2, None] + longnames = [None, "long2", None, None, None, None] + varnames = [None, None, "var3", "var4", None, None] + expected_metadatas = [ + _cube(stdname=stdname, longname=longname, varname=varname).metadata + for stdname, longname, varname in zip(stdnames, longnames, varnames) + ] + if usage == "scrambled": + _scramble(expected_metadatas) + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.metadata for cube in results] == 
expected_metadatas + + +class TestEqualiseAttributes(WarnChecked): + # Test the 'equalise_attributes' operation. + def test_calling(self, usage, mocker): + patch = mocker.patch("iris.util.equalise_attributes") + test_cubes = [_cube()] + kwargs, expected_metadatas = _usage_common( + usage, "equalise_attributes", test_cubes + ) + + # Apply operation + equalise_cubes(test_cubes, **kwargs) + + expected_calls = 0 if usage == "off" else 1 + assert len(patch.call_args_list) == expected_calls + + def test_basic_function(self, usage): + test_cubes = [_cube(att_a=10, att_b=1), _cube(att_a=10, att_b=2)] + kwargs, expected_metadatas = _usage_common( + usage, "equalise_attributes", test_cubes + ) + + # Calculate expected results + if usage != "off": + # result cube metadata should all be the same, with no varname + meta = _cube(att_a=10).metadata + expected_metadatas = [meta, meta] + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.metadata for cube in results] == expected_metadatas + + def test_operation_in_groups(self, usage): + # Check that it acts independently within groups (as defined, here, by naming) + test_cubes = [ + _cube(longname="a", att_a=10, att_b=1), + _cube(longname="a", att_a=10, att_b=2), + _cube(longname="b", att_a=10, att_b=1), + _cube(longname="b", att_a=10, att_b=1), + ] + kwargs, expected_metadatas = _usage_common( + usage, "equalise_attributes", test_cubes + ) + + # Calculate expected results + if usage != "off": + # result cube metadata should all be the same, with no varname + expected_metadatas = [ + # the "a" cubes have lost att_b, but the "b" cubes retain it + _cube(longname="a", att_a=10).metadata, + _cube(longname="a", att_a=10).metadata, + _cube(longname="b", att_a=10, att_b=1).metadata, + _cube(longname="b", att_a=10, att_b=1).metadata, + ] + if usage == "scrambled": + _scramble(expected_metadatas) + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.metadata for cube in results] == expected_metadatas + + def test_array_attributes(self, usage): + # Array content is worth a special test because it breaks dictionary equality. + a1 = np.array([4.1, 5.2, 6.3]) + a2 = np.array([1, 2]) + a3 = np.array([1, 3]) + test_cubes = [ + _cube(longname="a", v1=a1, v2=a2), + _cube(longname="a", v1=a1, v2=a3), + _cube(longname="b", v1=a1, v2=a2), + _cube(longname="b", v1=a1, v2=a2), + ] + kwargs, expected_metadatas = _usage_common( + usage, "equalise_attributes", test_cubes + ) + + # Calculate expected results + if usage != "off": + # result cube metadata should all be the same, with no varname + result_cubes = [ + _cube(longname="a", v1=a1), + _cube(longname="a", v1=a1), + _cube(longname="b", v1=a1, v2=a2), + _cube(longname="b", v1=a1, v2=a2), + ] + expected_metadatas = [cube.metadata for cube in result_cubes] + if usage == "scrambled": + _scramble(expected_metadatas) + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.metadata for cube in results] == expected_metadatas + + +class TestUnifyTimeUnits(WarnChecked): + # Test the 'unify_time_units' operation. 
+ def test_calling(self, usage, mocker): + patch = mocker.patch("iris.util.unify_time_units") + test_cubes = [_cube()] + kwargs, expected_metadatas = _usage_common( + usage, "unify_time_units", test_cubes + ) + + # Apply operation + equalise_cubes(test_cubes, **kwargs) + + expected_calls = 0 if usage == "off" else 1 + assert len(patch.call_args_list) == expected_calls + + def _cube_timeunits(self, unit, **kwargs): + cube = _cube(**kwargs) + cube.add_dim_coord(DimCoord([0.0], standard_name="time", units=unit), 0) + return cube + + def test_basic_function(self, usage): + if usage == "scrambled": + pytest.skip("scrambled mode not supported") + tu1, tu2 = [Unit(name) for name in ("days since 1970", "days since 1971")] + cu1, cu2 = self._cube_timeunits(tu1), self._cube_timeunits(tu2) + test_cubes = [cu1, cu2] + kwargs, expected_metadatas = _usage_common( + usage, "unify_time_units", test_cubes + ) + + expected_units = [tu1, tu2 if usage == "off" else tu1] + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.coord("time").units for cube in results] == expected_units + + def test_operation_in_groups(self, usage): + # Check that it acts independently within groups (as defined, here, by naming) + test_cubes = [ + _cube(longname="a", att_a=10, att_b=1), + _cube(longname="a", att_a=10, att_b=2), + _cube(longname="b", att_a=10, att_b=1), + _cube(longname="b", att_a=10, att_b=1), + ] + kwargs, expected_metadatas = _usage_common( + usage, "equalise_attributes", test_cubes + ) + + # Calculate expected results + if usage != "off": + # result cube metadata should all be the same, with no varname + expected_metadatas = [ + # the "a" cubes have lost att_b, but the "b" cubes retain it + _cube(longname="a", att_a=10).metadata, + _cube(longname="a", att_a=10).metadata, + _cube(longname="b", att_a=10, att_b=1).metadata, + _cube(longname="b", att_a=10, att_b=1).metadata, + ] + if usage == "scrambled": + _scramble(expected_metadatas) + + # Apply operation + results = equalise_cubes(test_cubes, **kwargs) + + # Assert result + assert [cube.metadata for cube in results] == expected_metadatas diff --git a/lib/iris/util.py b/lib/iris/util.py index a808087fd8..9681ab1484 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -8,6 +8,7 @@ from abc import ABCMeta, abstractmethod from collections.abc import Hashable, Iterable +from copy import deepcopy import functools import inspect import os @@ -15,6 +16,7 @@ import sys import tempfile from typing import Literal +from warnings import warn import cf_units from dask import array as da @@ -27,6 +29,7 @@ from iris.common import SERVICES from iris.common.lenient import _lenient_client import iris.exceptions +import iris.warnings def broadcast_to_shape(array, shape, dim_map, chunks=None): @@ -2189,3 +2192,137 @@ def mask_cube_from_shapefile(cube, shape, minimum_weight=0.0, in_place=False): masked_cube = mask_cube(cube, shapefile_mask, in_place=in_place) if not in_place: return masked_cube + + +def equalise_cubes( + cubes, + apply_all=False, + normalise_names=False, + equalise_attributes=False, + unify_time_units=False, +): + """Modify a set of cubes to assist merge/concatenate operations. + + Various different adjustments can be applied to the input cubes, to remove + differences which may prevent them from combining into larger cubes. The requested + "equalisation" operations are applied to each group of input cubes with matching + cube metadata (names, units, attributes and cell-methods). 
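+
+    For example, a minimal sketch (the ``cubes`` list and the final merge step
+    are illustrative only)::
+
+        from iris.util import equalise_cubes
+
+        equalised = equalise_cubes(cubes, apply_all=True)
+        merged = equalised.merge()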
+ + Parameters + ---------- + cubes : sequence of :class:`~iris.cube.Cube` + The input cubes, in a list or similar. + + apply_all : bool, default=False + Enable *all* the equalisation operations. + + normalise_names : bool, default=False + When True, remove any redundant ``var_name`` and ``long_name`` properties, + leaving only one ``standard_name``, ``long_name`` or ``var_name`` per cube. + In this case, the adjusted names are also used when selecting input groups. + + equalise_attributes : bool, default=False + When ``True``, apply an :func:`equalise_attributes` operation to each input + group. In this case, attributes are ignored when selecting input groups. + + unify_time_units : bool, default=False + When True, apply the :func:`unify_time_units` operation to each input group. + Note : while this may convert units of time reference coordinates, it does + not affect the units of the cubes themselves. + + Returns + ------- + :class:`~iris.cube.CubeList` + A CubeList containing the original input cubes, modified as required (in-place) + ready for merge or concatenate operations. + + Notes + ----- + All the 'equalise' operations operate in a similar fashion, in that they identify + and remove differences in a specific metadata element, altering metadata so that + a merge or concatenate can potentially combine a group of cubes into a single + result cube. + + The various 'equalise' operations are not applied to the entire input, but to + groups of input cubes with the same ``cube.metadata``. + + The input cube groups also depend on the equalisation operation(s) selected : + Operations which equalise a specific cube metadata element (names, units, + attributes or cell-methods) exclude that element from the input grouping criteria. + + """ + from iris.common.metadata import CubeMetadata + from iris.cube import CubeList + + if normalise_names or apply_all: + # Rationalise all the cube names + # Note: this option operates as a special case, independent of + # and *in advance of* the group selection + # (hence, it affects the groups which other operations are applied to) + for cube in cubes: + if cube.standard_name: + cube.long_name = None + cube.var_name = None + elif cube.long_name: + cube.var_name = None + + # Snapshot the cube metadata elements which we use to identify input groups + # TODO: we might want to sanitise practically comparable types here ? + # (e.g. large object arrays ??) + cube_grouping_values = [ + { + field: deepcopy(getattr(cube.metadata, field)) + for field in CubeMetadata._fields + } + for cube in cubes + ] + + # Collect the selected operations which we are going to apply. + equalisation_ops = [] + + if equalise_attributes or apply_all: + # get the function of the same name in this module + equalisation_ops.append(globals()["equalise_attributes"]) + # Prevent attributes from distinguishing input groups + for grouping_values in cube_grouping_values: + grouping_values.pop("attributes") + + if unify_time_units or apply_all: + # get the function of the same name in this module + equalisation_ops.append(globals()["unify_time_units"]) + + if not equalisation_ops: + if not normalise_names: + msg = ( + "'equalise_cubes' call does nothing, as no equalisation operations " + "are enabled (neither `apply_all` nor any individual keywords set)." + ) + warn(msg, category=iris.warnings.IrisUserWarning) + + else: + # NOTE: if no "equalisation_ops", nothing more to do. + # However, if 'unify-names' was done, we *already* modified cubes in-place. 
+ + # Group the cubes into sets with the same 'grouping values'. + # N.B. we *can't* use sets, or dictionary key checking, as our 'values' are not + # always hashable -- e.g. especially, array attributes. + # I fear this can be inefficient (repeated array compare), but maybe unavoidable + # TODO: might something nasty happen here if attributes contain weird stuff ?? + cubegroup_values = [] + cubegroup_cubes = [] + for cube, grouping_values in zip(cubes, cube_grouping_values): + if grouping_values not in cubegroup_values: + cubegroup_values.append(grouping_values) + cubegroup_cubes.append([cube]) + else: + i_at = cubegroup_values.index(grouping_values) + cubegroup_cubes[i_at].append(cube) + + # Apply operations to the groups : in-place modifications on the cubes + for group_cubes in cubegroup_cubes: + for op in equalisation_ops: + op(group_cubes) + + # Return a CubeList result = the *original* cubes, as modified + result = CubeList(cubes) + return result From fa29936ff407c43342ab8111744ec8f4c0e47fb6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 18:22:43 +0000 Subject: [PATCH 42/74] [pre-commit.ci] pre-commit autoupdate (#6274) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.3 → v0.8.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.3...v0.8.4) - [github.com/pre-commit/mirrors-mypy: v1.13.0 → v1.14.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.13.0...v1.14.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 21eca9dff4..2d8643af38 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.3" + rev: "v0.8.4" hooks: - id: ruff types: [file, python] @@ -63,7 +63,7 @@ repos: types: [file, python] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.13.0' + rev: 'v1.14.0' hooks: - id: mypy additional_dependencies: From f58c7878a77fcf60bf9516db0e6531e1131a4cdb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Dec 2024 15:51:01 +0000 Subject: [PATCH 43/74] Bump scitools/workflows from 2024.12.3 to 2024.12.4 (#6275) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.12.3 to 2024.12.4. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.12.3...2024.12.4) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index f68084546d..059c29337d 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.3 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.4 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 1a5472a26f..f99a6f1b4a 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.3 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.4 secrets: inherit From 47f455b6a00a8b2bf84966f0b4d5404d6fd79795 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 11:06:28 +0000 Subject: [PATCH 44/74] Bump scitools/workflows from 2024.12.4 to 2024.12.5 (#6276) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.12.4 to 2024.12.5. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.12.4...2024.12.5) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 059c29337d..81edc9ba17 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.4 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.5 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index f99a6f1b4a..c4f1c6892c 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.4 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.5 secrets: inherit From d03566972b70c2aa6c519f5a1fe42f70972c2496 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 3 Jan 2025 12:26:09 +0000 Subject: [PATCH 45/74] Adopt update common refresh-lockfiles action. 
(#6278) --- .github/workflows/refresh-lockfiles.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index c4f1c6892c..f382a391a5 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2024.12.5 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.0 secrets: inherit From d09b683eddfa0897bd954a9cba1a1818a7e3e3d5 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Fri, 3 Jan 2025 16:06:14 +0000 Subject: [PATCH 46/74] Updated environment lockfiles (#6279) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 63 ++++++------- requirements/locks/py311-linux-64.lock | 105 ++++++++++----------- requirements/locks/py312-linux-64.lock | 123 ++++++++++++------------- 3 files changed, 141 insertions(+), 150 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index e8db96dbc9..6c6c649b72 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -27,7 +27,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 +https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e @@ -42,7 +44,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -52,7 +54,6 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 @@ -62,10 +63,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_3.conda#9411c61ff1070b5e065b32840c39faa5 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_4.conda#9a5a1e3db671a8258c3f2c1969a4c654 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 @@ -128,7 +128,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_3.conda#dd9da69dd4c2bf798c0b8bd4786cafb5 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_4.conda#af19508df9d2e9f6894a9076a0857dc7 https://conda.anaconda.org/conda-forge/linux-64/python-3.10.16-he725a3c_1_cpython.conda#b887811a901b3aa622a92caf03bc8917 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c @@ -148,8 +148,8 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b3 https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 
https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 @@ -162,7 +162,7 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1. https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py310ha75aee5_0.conda#8aac4068f272b6bdeb0aa0f29d8e516f -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 @@ -170,7 +170,7 @@ https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_0.conda#faf232274689aa60da5a63e7cc5faeb7 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_1.conda#ae376af0a29183e98a95508ed6944664 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py310h3788b33_0.conda#4186d9b4d004b0fe0de6aa62496fb48a https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 @@ -180,7 +180,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 
-https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.5-ha7bfdaf_0.conda#76f3749eda7b24816aacd55b9f31447a +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.6-ha7bfdaf_0.conda#ec6abc65eefc96cba8443b2716dcc43b https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py310hff52083_2.conda#4e8b2a2851668c8ad4d5360845281be9 @@ -189,14 +189,13 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py310h3788b https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_1.conda#04b95993de18684b24bb742ffe0e90a8 https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py310ha75aee5_0.conda#dbc29ca007ac8bd41fb6921c6317740b -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py310ha75aee5_0.conda#a42a2ed94df11c5cfa5348a317e1f197 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py310ha75aee5_0.conda#00838ea1d4e87b1e6e2552bba98cc899 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f @@ -217,6 +216,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py310ha75aee5_0.conda#166d59aab40b9c607b4cc21c03924e9d https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py310ha75aee5_1.conda#ee18e67b0bd283f6a75369936451d6ac +https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b @@ -226,30 +226,30 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0. 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 -https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py310h8deb56e_0.conda#1fc24a3196ad5ede2a68148be61894f4 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py310h89163eb_0.conda#02795aff079fa439dbc85b4e19f9a122 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py310h89163eb_0.conda#f9bf6ea6ddf8349750f1b455f603b0ae https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py310ha75aee5_0.conda#d0be1adaa04a03aed745f3d02afb59ce -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py310h89163eb_0.conda#edd1be5d8c667f5e53667433e84efabc +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py310h89163eb_1.conda#c81251a712a36b477ed2330ec0e1a299 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.5-default_hb5137d0_0.conda#ec8649c89988d8a443c252c20f259b72 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.5-default_h9c6a7e4_0.conda#a3a5997b6b47373f0c1608d8503eb4e6 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.6-default_hb5137d0_0.conda#9caebd39281536bf6bcb32f665dd4fbf +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.6-default_h9c6a7e4_0.conda#e1d2936c320083f1c520c3a17372521c https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py310h5851e9f_0.conda#b36342af1ea0eb44bb6ccdefcb9d80d7 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py310h5851e9f_0.conda#d38cb65becc66134ed42a02e6155e8e0 
https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py310hfeaa1f3_0.conda#1947280342c7259b82a707e38ebc212e +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e @@ -272,6 +272,7 @@ https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py310ha75aee5_1.co https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py310h89163eb_2.conda#51e1600159ad5bc732f761a11c285189 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py310h5eaa309_1.conda#e67778e1cac3bca3b3300f6164f7ffb9 https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py310hf462985_2.conda#79f0b0f4ddfa86d17b061bab22533af1 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py310h2e9f774_0.conda#42a3ea3c283d930ae6d156b97ffe4740 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 @@ -281,15 +282,14 @@ https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py310hf462985_0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py310hfcf56fc_2.conda#b5d548b2a7cf8d0c74fc6c4bf42d1ca5 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py310had3dfd6_2.conda#a4166b41e54d22e794859641b7cae2d0 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310h3788b33_5.conda#e05b0475166b68c9dc4d7937e0315654 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py310ha39cb0e_1.conda#f49de34fb99934bf49ab330b5caffd64 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py310hf462985_0.conda#c31938674e4cda43617a4d70f99ffd0c https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_1.conda#af684ea869a37193a5c116a9aabf659a -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d 
https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d @@ -308,16 +308,17 @@ https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py310h686 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py310h5146f0f_101.conda#3e17df8e4192431b85afc89a539f6c28 https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py310ha75aee5_0.conda#4d5fc35ca762815c1b159b710cb22897 -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py310h89163eb_0.conda#88ec741ee5b8132d2824dd0034c2f67c +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py310h89163eb_0.conda#b32279f208a4ce5c03767ec079d2aae3 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py310h5eaa309_0.conda#ca4d935c1715f95b6e86846ad1675a2b https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310hf462985_3.conda#7fd2a4e83e8ff3a760984300dad6297c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 @@ -328,11 +329,11 @@ https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py310h9617cfe_ https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py310hc8241c7_209.conda#063eb6107225478aa00f283b102f3ec8 https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py310he5e186c_209.conda#03fd79331809ea4812c5430e47c04723 https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c -https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_0.conda#4b12a3321889056bf9a000be9a0763b3 +https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 
https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_1.conda#aa09c826cf825f905ade2586978263ca https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 48fb98e43d..c98854badd 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -27,7 +27,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 +https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e @@ -42,7 +44,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de 
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -50,9 +52,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 -https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 @@ -62,26 +62,23 @@ https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_3.conda#9411c61ff1070b5e065b32840c39faa5 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_4.conda#9a5a1e3db671a8258c3f2c1969a4c654 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 +https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2#6c99772d483f566d59e25037fea2c4b1 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 -https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 
https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda#c2f83a5ddadadcdb08fe05863295ee97 -https://conda.anaconda.org/conda-forge/linux-64/eigen-3.4.0-h00ab1b0_0.conda#b1b879d6d093f55dd40d58b5eb2f0699 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda#c94a5994ef49749880a8139cf9afcbe1 @@ -102,7 +99,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f -https://conda.anaconda.org/conda-forge/linux-64/openh264-2.4.1-h59595ed_0.conda#3dfcf61b8e78af08110f5229f79580af https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 @@ -116,7 +112,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f @@ -128,7 +123,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_3.conda#dd9da69dd4c2bf798c0b8bd4786cafb5 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_4.conda#af19508df9d2e9f6894a9076a0857dc7 
https://conda.anaconda.org/conda-forge/linux-64/python-3.11.11-h9e4cc4f_1_cpython.conda#8387070aa413ce9a8cc35a509fae938b https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c @@ -147,8 +142,8 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b3 https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 @@ -161,7 +156,7 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1. https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py311h9ecbd09_0.conda#75424a18fb275a18b288c099b869c3bc -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 @@ -169,7 +164,7 @@ https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_0.conda#faf232274689aa60da5a63e7cc5faeb7 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_1.conda#ae376af0a29183e98a95508ed6944664 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py311hd18a35c_0.conda#be34c90cce87090d24da64a7c239ca96 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 
https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 @@ -179,7 +174,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.5-ha7bfdaf_0.conda#76f3749eda7b24816aacd55b9f31447a +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.6-ha7bfdaf_0.conda#ec6abc65eefc96cba8443b2716dcc43b https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py311h38be061_2.conda#733b481d20ff260a34f2b0003ff4fbb3 @@ -189,14 +184,13 @@ https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py311h2dc5d0c_2. https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_1.conda#04b95993de18684b24bb742ffe0e90a8 https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py311h9ecbd09_0.conda#20d1c4ad24ac50f0941c63e81e4a86b7 -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py311h9ecbd09_0.conda#0ffc1f53106a38f059b151c465891ed3 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py311h9ecbd09_0.conda#c78bfbe5ad64c25c2f55d57a805ba2d2 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f @@ -217,6 +211,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py311h9ecbd09_0.conda#df3aee9c3e44489257a840b8354e77b9 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py311h9ecbd09_1.conda#00895577e2b4c24dca76675ab1862551 
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b @@ -226,30 +221,30 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0. https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 -https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py311hf29c0ef_0.conda#55553ecd5328336368db611f350b7039 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py311h2dc5d0c_0.conda#098c90e7d8761167e0f54ed6f81ee2f0 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py311h2dc5d0c_0.conda#2a772b30e69ba8319651e9f3ab01608f https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py311h9ecbd09_0.conda#69a0a85acdcc5e6d0f1cc915c067ad4c -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py311h2dc5d0c_0.conda#27bc755bed4972c51f4d2789f2cde56c +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py311h2dc5d0c_1.conda#04c0b385767445be8aefe0d4915cb747 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.5-default_hb5137d0_0.conda#ec8649c89988d8a443c252c20f259b72 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.5-default_h9c6a7e4_0.conda#a3a5997b6b47373f0c1608d8503eb4e6 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.6-default_hb5137d0_0.conda#9caebd39281536bf6bcb32f665dd4fbf +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.6-default_h9c6a7e4_0.conda#e1d2936c320083f1c520c3a17372521c 
https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py311hf916aec_0.conda#82c097817ff68e7b6f5db63cdcb593d2 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py311hf916aec_0.conda#54a2d01ec2cfc01644f17d25978b0ecf https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py311h49e9ac3_0.conda#2bd3d0f839ec0d1eaca817c9d1feb7c2 +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e @@ -258,7 +253,7 @@ https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py311h9ecbd09_0.conda#385d54815a5d2e74e68374d77446030b -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py311h2dc5d0c_0.conda#7ddc4f7d7120a103af3e06cf7f7e7fb1 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py311h2dc5d0c_0.conda#098c05da2799d9300eec94c24a7c8bda https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py311h9f3472d_1.conda#2c3c4f115d28ed9e001a271d5d8585aa https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd @@ -268,11 +263,12 @@ https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.4.0-hac27bb2_2.conda#ba5ac0bb9ec5aec38dec37c230b12d64 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.5.0-hac27bb2_0.conda#9b7a4ae9edab6f9604f56b790c3e1d02 https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py311h9ecbd09_1.conda#28d6b63784b350a2906dc264ad8c7f2a https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py311h7db5c69_1.conda#643f8cb35133eb1be4919fb953f0a25f 
https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py311h9f3472d_2.conda#72b6f1a496a67977b772b53fae55308a https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py311h0f98d5a_0.conda#22531205a97c116251713008d65dfefd https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 @@ -282,56 +278,55 @@ https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py311h9f3472d_0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py311he9a78e4_2.conda#c4aee8cadc4c9fc9a91aca0803473690 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py311h2fdb869_2.conda#4c78235905053663d1c9e23df3f11b65 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311hd18a35c_5.conda#4e8447ca8558a203ec0577b4730073f3 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py311hbc35293_1.conda#aec590674ba365e50ae83aa2d6e1efae https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py311h9f3472d_0.conda#555b148cafbd96b658499060d5e11a65 https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_1.conda#af684ea869a37193a5c116a9aabf659a -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.4.0-h3f63f65_2.conda#00a6127960a3f41d4bfcabd35d5fbeec -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.4.0-hac27bb2_2.conda#6cfc840bc39c17d92fb25e5a35789e5b -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.4.0-hac27bb2_2.conda#9e9814b40d8fdfd8485451e3fa2f1719 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.4.0-hac27bb2_2.conda#724719ce97feb6f310f88ae8dbb40afd -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.4.0-h3f63f65_2.conda#8908f31eab30f65636eb61ab9cb1f3ad -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.4.0-h5c8f2c3_2.conda#e098caa87868e8dcc7ed5d011981207d 
-https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.4.0-h5c8f2c3_2.conda#59bb8c3502cb9d35f1fb26691730288c -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.4.0-h5888daf_2.conda#e0b88fd64dc95f715ef52e607a9af89b -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.4.0-h6481b9d_2.conda#12bf831b85f17368bc71a26ac93a8493 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.4.0-h5888daf_2.conda#d48c774c40ea2047adbff043e9076e7a +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.5.0-h4d9b6c2_0.conda#c787d5a3d5b0776f0336004583297536 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.5.0-h4d9b6c2_0.conda#ad1ed56f60ec9a8e710703f38b860315 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.5.0-h3f63f65_0.conda#5bcc0022e2565606e3af7395ec3e156d +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.5.0-hac27bb2_0.conda#594ab1b892569c9cd15bcae9781a42b2 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.5.0-hac27bb2_0.conda#485e057ea6a17096b0539ca7473e4829 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.5.0-hac27bb2_0.conda#f0a9e23aa51b84be0a74a4518d4020ca +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.5.0-h3f63f65_0.conda#ae37e91183788f64935657b255cbff21 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.5.0-h5c8f2c3_0.conda#23e82dd5b616fa8879620609428791c9 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.5.0-h5c8f2c3_0.conda#ecf440381b082f7d2b9cb66d62d76efb +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.5.0-h5888daf_0.conda#a5baecc3ef0d0cca99d08cf335c06c03 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.5.0-h6481b9d_0.conda#698ad10adfc7aa6553392677fffe054f +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.5.0-h5888daf_0.conda#1c25d4e1965049a85c83762eaecb4436 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py311h2b939e6_0.conda#79239585ea50c427415ef629534bb3aa https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py311h7c29e4f_101.conda#d966f11d28c699da7e9de2aa2f323a4f https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py311h7db5c69_0.conda#20ba399d57a2b5de789a5b24341481a1 https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e 
https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h4c12d27_707.conda#aee131a2c291ca7d0d703900515aa772 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h9f3472d_3.conda#a7c4169b1c920361597ddacb461350fd https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py311h7158b74_209.conda#011801a68c022cf9692a4567d84678ca +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py311h5011d52_211.conda#92a2d6c0c3bd77a20a1b74c8941910b0 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac -https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py311hc8241c7_209.conda#13fdaae5c7c5c76089ca76f63b287ef5 -https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py311he5e186c_209.conda#54a9526336ff06739344f87726cbc61e +https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py311h3d4e8c9_211.conda#5b887ad0f1d5f462f1f509df54d2c711 +https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py311h3d4e8c9_211.conda#cfd2ce8f51539f6e52e83922b5235a79 https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c -https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_0.conda#4b12a3321889056bf9a000be9a0763b3 +https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_1.conda#aa09c826cf825f905ade2586978263ca https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 7dd8497d8b..4affa05c3f 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -27,7 +27,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 +https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e @@ -42,7 +44,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -50,9 +52,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 -https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 
https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 @@ -62,26 +62,23 @@ https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_3.conda#9411c61ff1070b5e065b32840c39faa5 +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_4.conda#9a5a1e3db671a8258c3f2c1969a4c654 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 +https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2#6c99772d483f566d59e25037fea2c4b1 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 -https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda#c2f83a5ddadadcdb08fe05863295ee97 -https://conda.anaconda.org/conda-forge/linux-64/eigen-3.4.0-h00ab1b0_0.conda#b1b879d6d093f55dd40d58b5eb2f0699 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda#c94a5994ef49749880a8139cf9afcbe1 @@ -102,7 +99,6 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f -https://conda.anaconda.org/conda-forge/linux-64/openh264-2.4.1-h59595ed_0.conda#3dfcf61b8e78af08110f5229f79580af https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 @@ -116,11 +112,10 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 @@ -128,7 +123,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_3.conda#dd9da69dd4c2bf798c0b8bd4786cafb5 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_4.conda#af19508df9d2e9f6894a9076a0857dc7 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.8-h9e4cc4f_1_cpython.conda#7fd2fd79436d9b473812f14e86746844 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c @@ -147,8 +142,8 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b3 
https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 @@ -161,7 +156,7 @@ https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1. https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 @@ -169,7 +164,7 @@ https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_0.conda#faf232274689aa60da5a63e7cc5faeb7 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_1.conda#ae376af0a29183e98a95508ed6944664 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 @@ -179,7 +174,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 
https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.5-ha7bfdaf_0.conda#76f3749eda7b24816aacd55b9f31447a +https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.6-ha7bfdaf_0.conda#ec6abc65eefc96cba8443b2716dcc43b https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py312h7900ff3_2.conda#fddd3092f921be8e01b18f2a0266d98f @@ -189,14 +184,13 @@ https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_2. https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_1.conda#04b95993de18684b24bb742ffe0e90a8 https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py312h66e93f0_0.conda#55d5742a696d7da1c1262e99b6217ceb -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py312h66e93f0_0.conda#0524eb91d3d78d76d671c6e3cd7cee82 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py312h66e93f0_0.conda#add2c79595fa8a9b6d653d7e4e2cf05f https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f @@ -217,6 +211,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda#e417822cb989e80a0d2b1b576fdd1657 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b @@ -226,40 +221,39 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0. https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 -https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py312h178313f_0.conda#a6a5f52f8260983b0aaeebcebf558a3e +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py312h178313f_0.conda#df113f58bdfc79c98f5e07b6bd3eb4c2 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda#6198b134b1c08173f33653896974d477 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py312h178313f_0.conda#968104bfe69e21fadeb30edd9c3785f9 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py312h178313f_1.conda#bc18c46eda4c2b29431981998507e723 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.5-default_hb5137d0_0.conda#ec8649c89988d8a443c252c20f259b72 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.5-default_h9c6a7e4_0.conda#a3a5997b6b47373f0c1608d8503eb4e6 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.6-default_hb5137d0_0.conda#9caebd39281536bf6bcb32f665dd4fbf +https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.6-default_h9c6a7e4_0.conda#e1d2936c320083f1c520c3a17372521c https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 
-https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py312h7e784f5_0.conda#c9e9a81299192e77428f40711a4fb00d +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py312h7e784f5_0.conda#6159cab400b61f38579a7692be5e630a https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf -https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 -https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 -https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f +https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.10-py312h178313f_0.conda#3e92784b8e32ab7d0b95ee296ba79a99 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py312h178313f_0.conda#8219afa093757bbe07b9825eb1973ed9 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd @@ -269,11 +263,12 @@ https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 
https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.4.0-hac27bb2_2.conda#ba5ac0bb9ec5aec38dec37c230b12d64 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.5.0-hac27bb2_0.conda#9b7a4ae9edab6f9604f56b790c3e1d02 https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 @@ -283,58 +278,58 @@ https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_2.conda#94688dd449f6c092e5f951780235aca1 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_1.conda#af684ea869a37193a5c116a9aabf659a -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.4.0-h3f63f65_2.conda#00a6127960a3f41d4bfcabd35d5fbeec 
-https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.4.0-hac27bb2_2.conda#6cfc840bc39c17d92fb25e5a35789e5b -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.4.0-hac27bb2_2.conda#9e9814b40d8fdfd8485451e3fa2f1719 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.4.0-hac27bb2_2.conda#724719ce97feb6f310f88ae8dbb40afd -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.4.0-h3f63f65_2.conda#8908f31eab30f65636eb61ab9cb1f3ad -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.4.0-h5c8f2c3_2.conda#e098caa87868e8dcc7ed5d011981207d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.4.0-h5c8f2c3_2.conda#59bb8c3502cb9d35f1fb26691730288c -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.4.0-h5888daf_2.conda#e0b88fd64dc95f715ef52e607a9af89b -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.4.0-h6481b9d_2.conda#12bf831b85f17368bc71a26ac93a8493 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.4.0-h5888daf_2.conda#d48c774c40ea2047adbff043e9076e7a +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.5.0-h4d9b6c2_0.conda#c787d5a3d5b0776f0336004583297536 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.5.0-h4d9b6c2_0.conda#ad1ed56f60ec9a8e710703f38b860315 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.5.0-h3f63f65_0.conda#5bcc0022e2565606e3af7395ec3e156d +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.5.0-hac27bb2_0.conda#594ab1b892569c9cd15bcae9781a42b2 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.5.0-hac27bb2_0.conda#485e057ea6a17096b0539ca7473e4829 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.5.0-hac27bb2_0.conda#f0a9e23aa51b84be0a74a4518d4020ca +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.5.0-h3f63f65_0.conda#ae37e91183788f64935657b255cbff21 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.5.0-h5c8f2c3_0.conda#23e82dd5b616fa8879620609428791c9 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.5.0-h5c8f2c3_0.conda#ecf440381b082f7d2b9cb66d62d76efb +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.5.0-h5888daf_0.conda#a5baecc3ef0d0cca99d08cf335c06c03 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.5.0-h6481b9d_0.conda#698ad10adfc7aa6553392677fffe054f +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.5.0-h5888daf_0.conda#1c25d4e1965049a85c83762eaecb4436 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda#c27a17a8c54c0d35cf83bbc0de8f7f77 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda#7e41ca6012a6bf609539aec0dfee93f7 https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 
-https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h4c12d27_707.conda#aee131a2c291ca7d0d703900515aa772 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py312hc73667e_209.conda#e2967eddf4ea06a8b645da9967f370be +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py312he62ef8e_211.conda#172c504fa3f6f83f0a4c7d69abb9fe05 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac -https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py312hc8241c7_209.conda#1354402d09a8614821d6d3c13d826863 -https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py312he5e186c_209.conda#c6aba64b606a07b20b345b1e4146494b +https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py312h3d4e8c9_211.conda#8f47d88e25b4592dd2ce599b00d2a340 +https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py312h3d4e8c9_211.conda#4ef0f89cbdda0a3681f183642a8e3f41 https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c -https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_0.conda#4b12a3321889056bf9a000be9a0763b3 +https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_0.conda#ac832cc43adc79118cf6e23f1f9b8995 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_1.conda#db0f1eb28b6df3a11e89437597309009 
-https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_0.conda#dc78276cbf5ec23e4b959d1bbd9caadb +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.18.0-pyhd8ed1ab_1.conda#aa09c826cf825f905ade2586978263ca https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b -https:///conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 From 599d7a9a70ae31ebccd23044f131aea6f07d050b Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Sat, 4 Jan 2025 06:24:48 +0000 Subject: [PATCH 47/74] Updated environment lockfiles (#6280) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 6 +++--- requirements/locks/py311-linux-64.lock | 6 +++--- requirements/locks/py312-linux-64.lock | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 6c6c649b72..05ea515407 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -147,7 +147,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hf71b8c https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda#e83a31202d1c0a000fce3e9cf3825875 https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 @@ -248,14 +248,14 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py310h5851e9f_0.conda#d38cb65becc66134ed42a02e6155e8e0 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 
-https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py310hfeaa1f3_0.conda#1947280342c7259b82a707e38ebc212e +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py310h7e6dc6c_0.conda#14d300b9e1504748e70cc6499a7b4d25 https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py310hf462985_1.conda#c2d5289e6cbcecf2c549e01772fe5274 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index c98854badd..57d182bb58 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -141,7 +141,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hfdbb02 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda#e83a31202d1c0a000fce3e9cf3825875 https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 @@ -243,14 +243,14 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py311hf916aec_0.conda#54a2d01ec2cfc01644f17d25978b0ecf https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py311h49e9ac3_0.conda#2bd3d0f839ec0d1eaca817c9d1feb7c2 +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py311h1322bbf_0.conda#9f4f5593335f76c1dbf7381c11fe7155 https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f 
https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py311h9ecbd09_0.conda#385d54815a5d2e74e68374d77446030b https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py311h2dc5d0c_0.conda#098c05da2799d9300eec94c24a7c8bda diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 4affa05c3f..9b6194f0b8 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -141,7 +141,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cd https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda#e83a31202d1c0a000fce3e9cf3825875 https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 @@ -243,14 +243,14 @@ https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py312h7e784f5_0.conda#6159cab400b61f38579a7692be5e630a https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda#d3894405f05b2c0f351d5de3ae26fa9c https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e 
https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py312h178313f_0.conda#8219afa093757bbe07b9825eb1973ed9 From 48bda8dbbb9e73ba791ed9b4c5a1f38d7c8e607d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 22:32:00 +0000 Subject: [PATCH 48/74] [pre-commit.ci] pre-commit autoupdate (#6281) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.4 → v0.8.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.4...v0.8.6) - [github.com/pre-commit/mirrors-mypy: v1.14.0 → v1.14.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.14.0...v1.14.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2d8643af38..561494b206 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.4" + rev: "v0.8.6" hooks: - id: ruff types: [file, python] @@ -63,7 +63,7 @@ repos: types: [file, python] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.14.0' + rev: 'v1.14.1' hooks: - id: mypy additional_dependencies: From 9cd6998457c58ddb7d8e5b2e45bc708722cc6614 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 06:25:30 +0000 Subject: [PATCH 49/74] Bump scitools/workflows from 2024.12.5 to 2025.01.1 (#6282) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2024.12.5 to 2025.01.1. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2024.12.5...2025.01.1) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 81edc9ba17..14eb2661cf 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2024.12.5 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.1 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index f382a391a5..244ceffabf 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.1 secrets: inherit From 8df3b42a83f257aa0c45df96bc5050499bae1afc Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Sat, 11 Jan 2025 06:18:21 +0000 Subject: [PATCH 50/74] Updated environment lockfiles (#6283) Co-authored-by: Lockfile bot --- requirements/locks/py310-linux-64.lock | 64 +++++++------- requirements/locks/py311-linux-64.lock | 114 ++++++++++++------------- requirements/locks/py312-linux-64.lock | 98 ++++++++------------- 3 files changed, 117 insertions(+), 159 deletions(-) diff --git a/requirements/locks/py310-linux-64.lock b/requirements/locks/py310-linux-64.lock index 05ea515407..4db8dfd206 100644 --- a/requirements/locks/py310-linux-64.lock +++ b/requirements/locks/py310-linux-64.lock @@ -31,7 +31,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 @@ -44,7 +44,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 
+https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -52,12 +52,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 -https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 @@ -65,23 +64,22 @@ https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.co https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_4.conda#9a5a1e3db671a8258c3f2c1969a4c654 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 +https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc 
https://conda.anaconda.org/conda-forge/linux-64/x264-1!164.3095-h166bdaf_2.tar.bz2#6c99772d483f566d59e25037fea2c4b1 https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 -https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.3.0-h59595ed_0.conda#c2f83a5ddadadcdb08fe05863295ee97 -https://conda.anaconda.org/conda-forge/linux-64/eigen-3.4.0-h00ab1b0_0.conda#b1b879d6d093f55dd40d58b5eb2f0699 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda#c94a5994ef49749880a8139cf9afcbe1 @@ -91,7 +89,7 @@ https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b1893 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 @@ -99,10 +97,8 @@ https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.co https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.14.1-hac33072_0.conda#cde393f461e0c169d9ffb2fc70f81c33 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f -https://conda.anaconda.org/conda-forge/linux-64/openh264-2.4.1-h59595ed_0.conda#3dfcf61b8e78af08110f5229f79580af https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 
https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 @@ -116,9 +112,8 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 @@ -194,7 +189,7 @@ https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py310ha75aee5_0.conda#dbc29ca007ac8bd41fb6921c6317740b https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py310ha75aee5_0.conda#00838ea1d4e87b1e6e2552bba98cc899 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda#232fb4577b6687b2d503ef8e254270c9 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac @@ -203,7 +198,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py310ha75aee https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py310ha75aee5_1.conda#0d4c5c76ae5f5aac6f0be419963a19dd https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -266,7 +261,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.4.0-hac27bb2_2.conda#ba5ac0bb9ec5aec38dec37c230b12d64 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.5.0-hac27bb2_0.conda#9b7a4ae9edab6f9604f56b790c3e1d02 https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py310ha75aee5_1.conda#48781b625a5c7701e04d222752cb2f62 https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py310h89163eb_2.conda#51e1600159ad5bc732f761a11c285189 @@ -279,30 +274,29 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py310hf462985_0.conda#4c441eff2be2e65bd67765c5642051c5 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py310hfcf56fc_2.conda#b5d548b2a7cf8d0c74fc6c4bf42d1ca5 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.0-py310hfa6ec8c_1.conda#ea64e2892554e79d3ce5a05bd5166240 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py310had3dfd6_2.conda#a4166b41e54d22e794859641b7cae2d0 -https://conda.anaconda.org/conda-forge/linux-64/tbb-devel-2022.0.0-h1f99690_0.conda#52317967d0c3dc2ef6f73c2e6a60e005 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310h3788b33_5.conda#e05b0475166b68c9dc4d7937e0315654 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py310ha39cb0e_1.conda#f49de34fb99934bf49ab330b5caffd64 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py310hf462985_0.conda#c31938674e4cda43617a4d70f99ffd0c https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.4.0-h4d9b6c2_2.conda#1d05a25da36ba5f98291d7237fc6b8ce -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.4.0-h4d9b6c2_2.conda#838b2db868f9ab69a7bad9c065a3362d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.4.0-h3f63f65_2.conda#00a6127960a3f41d4bfcabd35d5fbeec 
-https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.4.0-hac27bb2_2.conda#6cfc840bc39c17d92fb25e5a35789e5b -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.4.0-hac27bb2_2.conda#9e9814b40d8fdfd8485451e3fa2f1719 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.4.0-hac27bb2_2.conda#724719ce97feb6f310f88ae8dbb40afd -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.4.0-h3f63f65_2.conda#8908f31eab30f65636eb61ab9cb1f3ad -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.4.0-h5c8f2c3_2.conda#e098caa87868e8dcc7ed5d011981207d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.4.0-h5c8f2c3_2.conda#59bb8c3502cb9d35f1fb26691730288c -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.4.0-h5888daf_2.conda#e0b88fd64dc95f715ef52e607a9af89b -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.4.0-h6481b9d_2.conda#12bf831b85f17368bc71a26ac93a8493 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.4.0-h5888daf_2.conda#d48c774c40ea2047adbff043e9076e7a +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.5.0-h4d9b6c2_0.conda#c787d5a3d5b0776f0336004583297536 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.5.0-h4d9b6c2_0.conda#ad1ed56f60ec9a8e710703f38b860315 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.5.0-h3f63f65_0.conda#5bcc0022e2565606e3af7395ec3e156d +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.5.0-hac27bb2_0.conda#594ab1b892569c9cd15bcae9781a42b2 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.5.0-hac27bb2_0.conda#485e057ea6a17096b0539ca7473e4829 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.5.0-hac27bb2_0.conda#f0a9e23aa51b84be0a74a4518d4020ca +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.5.0-h3f63f65_0.conda#ae37e91183788f64935657b255cbff21 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.5.0-h5c8f2c3_0.conda#23e82dd5b616fa8879620609428791c9 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.5.0-h5c8f2c3_0.conda#ecf440381b082f7d2b9cb66d62d76efb +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.5.0-h5888daf_0.conda#a5baecc3ef0d0cca99d08cf335c06c03 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.5.0-h6481b9d_0.conda#698ad10adfc7aa6553392677fffe054f +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.5.0-h5888daf_0.conda#1c25d4e1965049a85c83762eaecb4436 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py310h68603db_0.conda#409498230a11a71578ed49d006165249 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 @@ -316,7 +310,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py310h5eaa309_0.c https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e 
https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-6.1.2-gpl_hdfc89ed_706.conda#196d43749bd6adac662856d836b2b2eb +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h4c12d27_707.conda#aee131a2c291ca7d0d703900515aa772 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 @@ -324,10 +318,10 @@ https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py310hf462 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py310h9617cfe_209.conda#1989896d5ae944eced08372bda5676a5 -https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py310hc8241c7_209.conda#063eb6107225478aa00f283b102f3ec8 -https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py310he5e186c_209.conda#03fd79331809ea4812c5430e47c04723 +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.2-pyhd8ed1ab_0.conda#860b3edb4bee7c76afb03435249e39c2 +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py310hc609d01_213.conda#e4b5b9689e73488d3e2bfbdcf6712381 +https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py310h3d4e8c9_213.conda#c0f8d714a33bb4b3f4e57011a6f476aa +https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py310h3d4e8c9_213.conda#b29e003cbd9c87ff1a72fd32fa85e12b https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 diff --git a/requirements/locks/py311-linux-64.lock b/requirements/locks/py311-linux-64.lock index 57d182bb58..f23c64fb20 100644 --- a/requirements/locks/py311-linux-64.lock +++ b/requirements/locks/py311-linux-64.lock @@ -19,7 +19,6 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.13-hb9d3cd8_0.conda#ae1370588aa6a5157c34c73e9bbb36a0 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db @@ -27,28 +26,30 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 -https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxshmfence-1.3.3-hb9d3cd8_0.conda#9a809ce9f65460195777f2f2116bae02 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab +https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda#fcd2016d1d299f654f81021e27496818 https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 +https://conda.anaconda.org/conda-forge/linux-64/level-zero-1.20.0-h84d6215_0.conda#ea9564ba97545d7f8944632f60e4c1b9 +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda#efab66b82ec976930b96d62a976de8e7 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 @@ -56,16 +57,20 @@ https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 +https://conda.anaconda.org/conda-forge/linux-64/libtasn1-4.19.0-h166bdaf_0.tar.bz2#93840744a8552e9ebf6bb1a5dffc125a https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libunistring-0.9.10-h7f98852_0.tar.bz2#7245a044b4a1980ed83196176b78b73a https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_4.conda#9a5a1e3db671a8258c3f2c1969a4c654 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 +https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe +https://conda.anaconda.org/conda-forge/linux-64/nettle-3.9.1-h7ab15ed_0.conda#2bf1915cc107738811368afcb0993a59 https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 @@ -87,18 +92,20 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 
https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa +https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda#4fab9799da9571266d05ca5503330655 https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 +https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda#9aba7960731e6b4547b3a52f812ed801 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 -https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda#d8703f1ffe5a06356f06467f1d0b9464 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.14.1-hac33072_0.conda#cde393f461e0c169d9ffb2fc70f81c33 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f +https://conda.anaconda.org/conda-forge/linux-64/p11-kit-0.24.1-hc5aa10d_0.tar.bz2#56ee94e34b71742bbdfa832c974e47a8 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 @@ -106,29 +113,23 @@ https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda#8637c3e5821654d0edf97e2b0404b443 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda#ad748ccca349aec3e91743e08b5e2b50 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd 
-https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda#1091193789bb830127ed067a9e01ac57 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_4.conda#af19508df9d2e9f6894a9076a0857dc7 https://conda.anaconda.org/conda-forge/linux-64/python-3.11.11-h9e4cc4f_1_cpython.conda#8387070aa413ce9a8cc35a509fae938b https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e @@ -147,9 +148,7 @@ https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.con https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb 
https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py311h55d416d_3.conda#d21db006755203fe890596d3eae992ce -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8d88f4a2242e6b96f9ecff9a6a05b2f1 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e @@ -158,6 +157,7 @@ https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py311h9ecbd09_0.conda#75424a18fb275a18b288c099b869c3bc https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda#c7f243bbaea97cd6ea1edd693270100e https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a @@ -167,15 +167,14 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_1.conda#ae376af0a29183e98a95508ed6944664 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py311hd18a35c_0.conda#be34c90cce87090d24da64a7c239ca96 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda#a28808eae584c7f519943719b2a2b386 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.6-ha7bfdaf_0.conda#ec6abc65eefc96cba8443b2716dcc43b -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py311h38be061_2.conda#733b481d20ff260a34f2b0003ff4fbb3 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py311h2dc5d0c_1.conda#6565a715337ae279e351d0abd8ffe88a @@ -189,7 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9 
https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py311h9ecbd09_0.conda#20d1c4ad24ac50f0941c63e81e4a86b7 https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py311h9ecbd09_0.conda#c78bfbe5ad64c25c2f55d57a805ba2d2 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda#232fb4577b6687b2d503ef8e254270c9 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac @@ -198,7 +197,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py311h9ecbd0 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py311h9ecbd09_1.conda#abeb54d40f439b86f75ea57045ab8496 https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -212,11 +211,7 @@ https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py311h9ecbd09_0.co https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py311h9ecbd09_1.conda#00895577e2b4c24dca76675ab1862551 https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e @@ -231,17 +226,14 @@ https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py311h2dc5d0c_0. 
https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py311h9ecbd09_0.conda#69a0a85acdcc5e6d0f1cc915c067ad4c https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py311h2dc5d0c_1.conda#04c0b385767445be8aefe0d4915cb747 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.1.0-h0b3b770_0.conda#ab1d7d56034814f4c3ed9f69f8c68806 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.6-default_hb5137d0_0.conda#9caebd39281536bf6bcb32f665dd4fbf -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.6-default_h9c6a7e4_0.conda#e1d2936c320083f1c520c3a17372521c -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d +https://conda.anaconda.org/conda-forge/linux-64/libidn2-2.3.7-hd590300_0.conda#2b7b0d827c6447cc1d85dc06d5b5de46 https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py311hf916aec_0.conda#54a2d01ec2cfc01644f17d25978b0ecf -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py311h1322bbf_0.conda#9f4f5593335f76c1dbf7381c11fe7155 https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f @@ -251,23 +243,21 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py311h9ecbd09_0.conda#385d54815a5d2e74e68374d77446030b https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py311h2dc5d0c_0.conda#098c05da2799d9300eec94c24a7c8bda https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py311h9f3472d_1.conda#2c3c4f115d28ed9e001a271d5d8585aa https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py311hd18a35c_0.conda#351cb68d2081e249069748b6e60b3cd2 
-https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 +https://conda.anaconda.org/conda-forge/linux-64/gnutls-3.8.8-h2d3e045_0.conda#28d86f5826387902f48a63906b4a1a24 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 -https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 +https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-hba53ac1_1.conda#f5e75fe79d446bf4975b41d375314605 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.5.0-hac27bb2_0.conda#9b7a4ae9edab6f9604f56b790c3e1d02 -https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.6.0-hac27bb2_3.conda#10ee0153cd8ddc6bd2ec147e7fd56280 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py311h9ecbd09_1.conda#28d6b63784b350a2906dc264ad8c7f2a https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py311h7db5c69_1.conda#643f8cb35133eb1be4919fb953f0a25f -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py311h9f3472d_2.conda#72b6f1a496a67977b772b53fae55308a https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py311h0f98d5a_0.conda#22531205a97c116251713008d65dfefd @@ -275,52 +265,54 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py311h9f3472d_0.conda#17334e5c12abdf2db6b25bd4187cd3e4 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py311he9a78e4_2.conda#c4aee8cadc4c9fc9a91aca0803473690 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.0-py311hc1ac118_1.conda#a5eaf17ff42cbaad9107388402c9542a https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py311h2fdb869_2.conda#4c78235905053663d1c9e23df3f11b65 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311hd18a35c_5.conda#4e8447ca8558a203ec0577b4730073f3 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py311hbc35293_1.conda#aec590674ba365e50ae83aa2d6e1efae https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py311h9f3472d_0.conda#555b148cafbd96b658499060d5e11a65 https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 
-https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.5.0-h4d9b6c2_0.conda#c787d5a3d5b0776f0336004583297536 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.5.0-h4d9b6c2_0.conda#ad1ed56f60ec9a8e710703f38b860315 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.5.0-h3f63f65_0.conda#5bcc0022e2565606e3af7395ec3e156d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.5.0-hac27bb2_0.conda#594ab1b892569c9cd15bcae9781a42b2 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.5.0-hac27bb2_0.conda#485e057ea6a17096b0539ca7473e4829 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.5.0-hac27bb2_0.conda#f0a9e23aa51b84be0a74a4518d4020ca -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.5.0-h3f63f65_0.conda#ae37e91183788f64935657b255cbff21 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.5.0-h5c8f2c3_0.conda#23e82dd5b616fa8879620609428791c9 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.5.0-h5c8f2c3_0.conda#ecf440381b082f7d2b9cb66d62d76efb -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.5.0-h5888daf_0.conda#a5baecc3ef0d0cca99d08cf335c06c03 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.5.0-h6481b9d_0.conda#698ad10adfc7aa6553392677fffe054f -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.5.0-h5888daf_0.conda#1c25d4e1965049a85c83762eaecb4436 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 +https://conda.anaconda.org/conda-forge/linux-64/libmicrohttpd-1.0.1-hbc5bc17_1.conda#c384e4dcd3c345b54bfb79d9ff712349 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.6.0-h4d9b6c2_3.conda#9a3ade47ab98a071c3538246cfc138c2 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.6.0-h4d9b6c2_3.conda#246bbf8c6e41b5ea85b2af7c2c51bda5 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.6.0-h3f63f65_3.conda#0027d0eb0b43817adf23778721fc2156 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.6.0-hac27bb2_3.conda#59f8fb2a68214d2a672b245392ffd640 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.6.0-hac27bb2_3.conda#236616fe93f334dd180e0bf188fde7bd +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.6.0-hac27bb2_3.conda#61466e67e4cf21d832dfebc8e9368ecd +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.6.0-h3f63f65_3.conda#b977bfc0c549a779f812a655a8a69e6b +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.6.0-h6363af5_3.conda#be7d67d6363a63df1661aead734cb5cc 
+https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.6.0-h6363af5_3.conda#cf097d1aa9f828ac24d9ec686411f459 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.6.0-h5888daf_3.conda#8bd1132dd3bcc6017ca73efa06422299 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.6.0-h630ec5c_3.conda#c5681cdf084833631ed5633a4e1d42fc +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.6.0-h5888daf_3.conda#dfbbe82b7a068af10f55b40837e1b942 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py311h2b939e6_0.conda#79239585ea50c427415ef629534bb3aa https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py311h7c29e4f_101.conda#d966f11d28c699da7e9de2aa2f323a4f -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.2-pyhd8ed1ab_0.conda#860b3edb4bee7c76afb03435249e39c2 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py311h7db5c69_0.conda#20ba399d57a2b5de789a5b24341481a1 https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 +https://conda.anaconda.org/conda-forge/linux-64/elfutils-0.192-h7f4e02f_1.conda#369ce48a589a2aac91906c9ed89dd6e8 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h4c12d27_707.conda#aee131a2c291ca7d0d703900515aa772 +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h099772d_709.conda#e25da7325ba8851b237e5a9c8dfffe32 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h9f3472d_3.conda#a7c4169b1c920361597ddacb461350fd https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py311h5011d52_211.conda#92a2d6c0c3bd77a20a1b74c8941910b0 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a +https://conda.anaconda.org/conda-forge/linux-64/mesalib-24.3.3-h0b126fc_0.conda#bcd3419a45e4f39625a13d450e1ffb40 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac 
-https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py311h3d4e8c9_211.conda#5b887ad0f1d5f462f1f509df54d2c711 -https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py311h3d4e8c9_211.conda#cfd2ce8f51539f6e52e83922b5235a79 +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-osmesa_py311hce1a2bb_113.conda#d7b2eada0aa2718f36037d07aecaea1b +https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-osmesa_py311h838d114_113.conda#ae2674e5ebaa320086c10c2548247496 +https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-osmesa_py311h838d114_113.conda#849fd62cd47804e606a2bcf88fbf5ab4 https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 9b6194f0b8..bf899e86bd 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -19,7 +19,6 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.13-hb9d3cd8_0.conda#ae1370588aa6a5157c34c73e9bbb36a0 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db @@ -27,24 +26,23 @@ https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 -https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 +https://conda.anaconda.org/conda-forge/linux-64/level-zero-1.20.0-h84d6215_0.conda#ea9564ba97545d7f8944632f60e4c1b9 +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -56,7 +54,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 @@ -64,7 +62,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.co https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-9.0.1-h266115a_4.conda#9a5a1e3db671a8258c3f2c1969a4c654 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 
https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f @@ -88,15 +86,14 @@ https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b1893 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 -https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda#d8703f1ffe5a06356f06467f1d0b9464 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libvpx-1.14.1-hac33072_0.conda#cde393f461e0c169d9ffb2fc70f81c33 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1.conda#e46f7ac4917215b49df2ea09a694a3fa https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 @@ -106,14 +103,10 @@ https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.1-hb711507_2.conda#8637c3e5821654d0edf97e2b0404b443 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_0.conda#ad748ccca349aec3e91743e08b5e2b50 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 @@ -123,12 +116,9 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-9.0.1-he0572af_4.conda#af19508df9d2e9f6894a9076a0857dc7 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.8-h9e4cc4f_1_cpython.conda#7fd2fd79436d9b473812f14e86746844 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e @@ -147,9 +137,7 @@ https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.con https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py312h8fd2918_3.conda#21e433caf1bb1e4c95832f8bb731d64c -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8d88f4a2242e6b96f9ecff9a6a05b2f1 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e @@ -168,14 +156,11 @@ https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_ 
https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 -https://conda.anaconda.org/conda-forge/linux-64/libllvm19-19.1.6-ha7bfdaf_0.conda#ec6abc65eefc96cba8443b2716dcc43b -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.7.0-h2c5496b_1.conda#e2eaefa4de2b7237af7c907b8bbc760a https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py312h7900ff3_2.conda#fddd3092f921be8e01b18f2a0266d98f https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda#eb227c3e0bf58f5bd69c0532b157975b @@ -189,7 +174,7 @@ https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py312h66e93f0_0.conda#55d5742a696d7da1c1262e99b6217ceb https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py312h66e93f0_0.conda#add2c79595fa8a9b6d653d7e4e2cf05f https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda#232fb4577b6687b2d503ef8e254270c9 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac @@ -198,7 +183,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e 
https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -212,12 +197,7 @@ https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.co https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 -https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.5-hb9d3cd8_0.conda#eb44b3b6deb1cab08d72cb61686fe64c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 @@ -231,17 +211,13 @@ https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py312h178313f_0. https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda#6198b134b1c08173f33653896974d477 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py312h178313f_1.conda#bc18c46eda4c2b29431981998507e723 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.1.0-h0b3b770_0.conda#ab1d7d56034814f4c3ed9f69f8c68806 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp19.1-19.1.6-default_hb5137d0_0.conda#9caebd39281536bf6bcb32f665dd4fbf -https://conda.anaconda.org/conda-forge/linux-64/libclang13-19.1.6-default_h9c6a7e4_0.conda#e1d2936c320083f1c520c3a17372521c -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py312h7e784f5_0.conda#6159cab400b61f38579a7692be5e630a -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.9-he970967_0.conda#ca2de8bbdc871bce41dbf59e51324165 
https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda#d3894405f05b2c0f351d5de3ae26fa9c https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f @@ -251,23 +227,20 @@ https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py312h178313f_0.conda#8219afa093757bbe07b9825eb1973ed9 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda#f5fbba0394ee45e9a64a73c2a994126a -https://conda.anaconda.org/conda-forge/linux-64/glew-2.1.0-h9c3ff4c_2.tar.bz2#fb05eb5c47590b247658243d27fc32f1 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 -https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-h1dc1e6a_0.conda#2a66267ba586dadd110cc991063cfff7 +https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-hba53ac1_1.conda#f5e75fe79d446bf4975b41d375314605 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.5.0-hac27bb2_0.conda#9b7a4ae9edab6f9604f56b790c3e1d02 -https://conda.anaconda.org/conda-forge/linux-64/libpq-17.2-h3b95a9b_1.conda#37724d8bae042345a19ca1a25dde786b +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.6.0-hac27bb2_3.conda#10ee0153cd8ddc6bd2ec147e7fd56280 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f @@ -275,52 +248,51 @@ 
https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0.conda#3f62987017ad18e9e7dadce9899de9ef -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_2.conda#94688dd449f6c092e5f951780235aca1 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.0-py312h180e4f1_1.conda#401e9d25f6ed7d9d9a06da0dca473c3e https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.5.0-h4d9b6c2_0.conda#c787d5a3d5b0776f0336004583297536 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.5.0-h4d9b6c2_0.conda#ad1ed56f60ec9a8e710703f38b860315 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.5.0-h3f63f65_0.conda#5bcc0022e2565606e3af7395ec3e156d -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.5.0-hac27bb2_0.conda#594ab1b892569c9cd15bcae9781a42b2 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.5.0-hac27bb2_0.conda#485e057ea6a17096b0539ca7473e4829 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.5.0-hac27bb2_0.conda#f0a9e23aa51b84be0a74a4518d4020ca -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.5.0-h3f63f65_0.conda#ae37e91183788f64935657b255cbff21 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.5.0-h5c8f2c3_0.conda#23e82dd5b616fa8879620609428791c9 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.5.0-h5c8f2c3_0.conda#ecf440381b082f7d2b9cb66d62d76efb -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.5.0-h5888daf_0.conda#a5baecc3ef0d0cca99d08cf335c06c03 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.5.0-h6481b9d_0.conda#698ad10adfc7aa6553392677fffe054f 
-https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.5.0-h5888daf_0.conda#1c25d4e1965049a85c83762eaecb4436 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.6.0-h4d9b6c2_3.conda#9a3ade47ab98a071c3538246cfc138c2 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.6.0-h4d9b6c2_3.conda#246bbf8c6e41b5ea85b2af7c2c51bda5 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.6.0-h3f63f65_3.conda#0027d0eb0b43817adf23778721fc2156 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-cpu-plugin-2024.6.0-hac27bb2_3.conda#59f8fb2a68214d2a672b245392ffd640 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-gpu-plugin-2024.6.0-hac27bb2_3.conda#236616fe93f334dd180e0bf188fde7bd +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-intel-npu-plugin-2024.6.0-hac27bb2_3.conda#61466e67e4cf21d832dfebc8e9368ecd +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-ir-frontend-2024.6.0-h3f63f65_3.conda#b977bfc0c549a779f812a655a8a69e6b +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-onnx-frontend-2024.6.0-h6363af5_3.conda#be7d67d6363a63df1661aead734cb5cc +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-paddle-frontend-2024.6.0-h6363af5_3.conda#cf097d1aa9f828ac24d9ec686411f459 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-pytorch-frontend-2024.6.0-h5888daf_3.conda#8bd1132dd3bcc6017ca73efa06422299 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend-2024.6.0-h630ec5c_3.conda#c5681cdf084833631ed5633a4e1d42fc +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.6.0-h5888daf_3.conda#dfbbe82b7a068af10f55b40837e1b942 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda#c27a17a8c54c0d35cf83bbc0de8f7f77 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda#7e41ca6012a6bf609539aec0dfee93f7 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.7.3-h6e8976b_1.conda#f3234422a977b5d400ccf503ad55c5d1 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.1-pyhd8ed1ab_1.conda#af249fc92d1344913ff6c811f5b9096b +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.2-pyhd8ed1ab_0.conda#860b3edb4bee7c76afb03435249e39c2 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h4c12d27_707.conda#aee131a2c291ca7d0d703900515aa772 
+https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h099772d_709.conda#e25da7325ba8851b237e5a9c8dfffe32 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-qt_py312he62ef8e_211.conda#172c504fa3f6f83f0a4c7d69abb9fe05 +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-egl_py312hc001bbe_13.conda#559a8d091b4e8806520f7f2f797c66de https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac -https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-qt_py312h3d4e8c9_211.conda#8f47d88e25b4592dd2ce599b00d2a340 -https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-qt_py312h3d4e8c9_211.conda#4ef0f89cbdda0a3681f183642a8e3f41 +https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-egl_py312h3373a60_13.conda#5c0f519bb190b29f9c7a1d5245754685 +https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-egl_py312h3373a60_13.conda#65fa8fc7f02e1a50b924da2b2a1dacde https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 From 4508dd5306c6cdad95bb62f1a245b94239e8470c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 Jan 2025 18:40:20 +0000 Subject: [PATCH 51/74] [pre-commit.ci] pre-commit autoupdate (#6284) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.6 → v0.9.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.6...v0.9.1) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- docs/src/conf.py | 2 +- lib/iris/_representation/cube_printout.py | 4 +-- lib/iris/analysis/_grid_angles.py | 5 ++-- lib/iris/analysis/_regrid.py | 5 ++-- lib/iris/analysis/_scipy_interpolate.py | 3 +- lib/iris/analysis/cartography.py | 5 ++-- lib/iris/analysis/maths.py | 8 ++--- lib/iris/analysis/trajectory.py | 3 +- lib/iris/aux_factory.py | 15 ++++------ lib/iris/common/lenient.py | 30 +++++++++---------- lib/iris/coord_systems.py | 3 +- lib/iris/coords.py | 6 ++-- lib/iris/cube.py | 14 ++++----- lib/iris/experimental/regrid.py | 3 +- lib/iris/fileformats/__init__.py | 2 +- lib/iris/fileformats/_ff.py | 8 ++--- .../fileformats/_nc_load_rules/actions.py | 3 +- .../fileformats/_nc_load_rules/helpers.py | 3 +- lib/iris/fileformats/dot.py | 2 +- 
lib/iris/fileformats/netcdf/saver.py | 3 +- lib/iris/fileformats/netcdf/ugrid_load.py | 3 +- lib/iris/fileformats/pp.py | 2 +- lib/iris/io/__init__.py | 5 ++-- lib/iris/iterate.py | 6 ++-- lib/iris/mesh/components.py | 17 ++++------- lib/iris/plot.py | 4 +-- lib/iris/tests/__init__.py | 7 ++--- lib/iris/tests/_shared_utils.py | 16 ++++------ .../tests/integration/netcdf/test_general.py | 3 +- .../tests/integration/plot/test_colorbar.py | 12 ++++---- .../test_mask_cube_from_shapefile.py | 30 +++++++++---------- lib/iris/tests/stock/_stock_2d_latlons.py | 4 +-- lib/iris/tests/test_coding_standards.py | 12 ++++---- lib/iris/tests/test_plot.py | 3 +- .../regrid/test__CurvilinearRegridder.py | 6 ++-- .../tests/unit/analysis/test_PERCENTILE.py | 6 ++-- .../unit/coords/test__DimensionalMetadata.py | 10 ++----- .../tests/unit/cube/test_CubeAttrsDict.py | 4 +-- .../actions/test__time_coords.py | 2 +- .../test_has_supported_mercator_parameters.py | 2 +- ...upported_polar_stereographic_parameters.py | 2 +- .../loader/ugrid_load/test_meshload_checks.py | 2 +- .../netcdf/saver/test_Saver__ugrid.py | 2 +- .../test_optimal_array_structure.py | 4 +-- .../tests/unit/mesh/components/test_MeshXY.py | 3 +- .../mesh/utils/test_recombine_submeshes.py | 2 +- lib/iris/tests/unit/pandas/test_pandas.py | 2 +- .../util/test_mask_cube_from_shapefile.py | 30 +++++++++---------- lib/iris/util.py | 14 +++------ 50 files changed, 146 insertions(+), 198 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 561494b206..db14e35356 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.6" + rev: "v0.9.1" hooks: - id: ruff types: [file, python] diff --git a/docs/src/conf.py b/docs/src/conf.py index 70b1063585..8134b3456f 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -141,7 +141,7 @@ def _dotv(version): .. |python_version| replace:: {build_python_version} .. |python_support| replace:: {python_support} .. |iris_version| replace:: v{version} -.. |build_date| replace:: ({datetime.datetime.now().strftime('%d %b %Y')}) +.. |build_date| replace:: ({datetime.datetime.now().strftime("%d %b %Y")}) """ # Add any Sphinx extension module names here, as strings. 
They can be diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 1e648b25f6..8169b167df 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -66,7 +66,7 @@ def add_row(self, cols, aligns, i_col_unlimited=None): """ n_cols = len(cols) if len(aligns) != n_cols: - msg = f"Number of aligns ({len(aligns)})" f" != number of cols ({n_cols})" + msg = f"Number of aligns ({len(aligns)}) != number of cols ({n_cols})" raise ValueError(msg) if self.n_columns is not None: # For now, all rows must have same number of columns @@ -106,7 +106,7 @@ def formatted_as_strings(self): elif align == "right": col_text = col.rjust(width) else: - msg = f'Unknown alignment "{align}" ' 'not in ("left", "right")' + msg = f'Unknown alignment "{align}" not in ("left", "right")' raise ValueError(msg) col_texts.append(col_text) diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 80b73d81d7..3ba406e02a 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -207,8 +207,7 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): # Now should have either 2 coords or 2 arrays. if not hasattr(x, "shape") or not hasattr(y, "shape"): msg = ( - "Inputs (x,y) must have array shape property." - "Got type(x)={} and type(y)={}." + "Inputs (x,y) must have array shape property.Got type(x)={} and type(y)={}." ) raise ValueError(msg.format(type(x), type(y))) @@ -349,7 +348,7 @@ def transform_xy_arrays(x, y): lhs_xyz = 0.5 * (xyz[..., 0] + xyz[..., 3]) rhs_xyz = 0.5 * (xyz[..., 1] + xyz[..., 2]) else: - msg = 'unrecognised cell_angle_boundpoints of "{}", ' "must be one of {}" + msg = 'unrecognised cell_angle_boundpoints of "{}", must be one of {}' raise ValueError( msg.format(cell_angle_boundpoints, list(angle_boundpoints_vals.keys())) ) diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index fd56eb04a1..23d25ecc42 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -1120,8 +1120,9 @@ def regrid_reference_surface( result.add_aux_factory(factory.updated(coord_mapping)) except KeyError: msg = ( - "Cannot update aux_factory {!r} because of dropped" - " coordinates.".format(factory.name()) + "Cannot update aux_factory {!r} because of dropped coordinates.".format( + factory.name() + ) ) warnings.warn(msg, category=IrisImpossibleUpdateWarning) diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index 251fb4bf70..4ac1df9c9c 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -203,8 +203,7 @@ def compute_interp_weights(self, xi, method=None): np.all(self.grid[i][0] <= p), np.all(p <= self.grid[i][-1]) ): raise ValueError( - "One of the requested xi is out of " - "bounds in dimension %d" % i + "One of the requested xi is out of bounds in dimension %d" % i ) method = self.method if method is None else method diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index d3967dfef3..308ba9289b 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -1078,8 +1078,9 @@ def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, """ if x.shape != y.shape: raise ValueError( - "Arrays do not have matching shapes. " - "x.shape is {}, y.shape is {}.".format(x.shape, y.shape) + "Arrays do not have matching shapes. 
x.shape is {}, y.shape is {}.".format( + x.shape, y.shape + ) ) ones = np.ones(x.shape) zeros = np.zeros(x.shape) diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 62adf7b638..24d95153b5 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -202,7 +202,7 @@ def _assert_is_cube(cube): from iris.cube import Cube if not isinstance(cube, Cube): - raise TypeError('The "cube" argument must be an instance of ' "iris.cube.Cube.") + raise TypeError('The "cube" argument must be an instance of iris.cube.Cube.') @_lenient_client(services=SERVICES) @@ -441,8 +441,7 @@ def _inplace_common_checks(cube, other, math_op): other_dtype = _get_dtype(other) if not np.can_cast(other_dtype, cube.dtype, "same_kind"): aemsg = ( - "Cannot perform inplace {} between {!r} " - "with {} data and {!r} with {} data." + "Cannot perform inplace {} between {!r} with {} data and {!r} with {} data." ) raise ArithmeticError( aemsg.format(math_op, cube, cube.dtype, other, other_dtype) @@ -1139,8 +1138,7 @@ def ws_units_func(u_cube, v_cube): def __repr__(self): result = ( - f"iris.analysis.maths.IFunc({self._data_func_name}, " - f"{self._unit_func_name})" + f"iris.analysis.maths.IFunc({self._data_func_name}, {self._unit_func_name})" ) return result diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 2111dd2504..100c6a5de1 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -560,8 +560,7 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): coord, value = sample_points[0] except (KeyError, ValueError): emsg = ( - "Sample points must be a list of " - "(coordinate, value) pairs, got {!r}." + "Sample points must be a list of (coordinate, value) pairs, got {!r}." ) raise TypeError(emsg.format(sample_points)) diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index 41e1e9f573..ae341c0976 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -427,8 +427,7 @@ def _check_dependencies(pressure_at_top, sigma, surface_air_pressure): for coord in (pressure_at_top, surface_air_pressure): if coord.nbounds: msg = ( - f"Coordinate '{coord.name()}' has bounds. These will " - "be disregarded" + f"Coordinate '{coord.name()}' has bounds. 
These will be disregarded" ) warnings.warn(msg, category=IrisIgnoringBoundsWarning, stacklevel=2) @@ -438,7 +437,7 @@ def _check_dependencies(pressure_at_top, sigma, surface_air_pressure): sigma.units = cf_units.Unit("1") if not sigma.units.is_dimensionless(): raise ValueError( - f"Invalid units: 'sigma' must be dimensionless, got " f"'{sigma.units}'" + f"Invalid units: 'sigma' must be dimensionless, got '{sigma.units}'" ) if pressure_at_top.units != surface_air_pressure.units: raise ValueError( @@ -1389,9 +1388,8 @@ def _check_dependencies(s, c, eta, depth, depth_c): coord.units = cf_units.Unit("1") if coord is not None and not coord.units.is_dimensionless(): - msg = ( - "Invalid units: {} coordinate {!r} " - "must be dimensionless.".format(term, coord.name()) + msg = "Invalid units: {} coordinate {!r} must be dimensionless.".format( + term, coord.name() ) raise ValueError(msg) @@ -1770,9 +1768,8 @@ def _check_dependencies(s, c, eta, depth, depth_c): coord.units = cf_units.Unit("1") if coord is not None and not coord.units.is_dimensionless(): - msg = ( - "Invalid units: {} coordinate {!r} " - "must be dimensionless.".format(term, coord.name()) + msg = "Invalid units: {} coordinate {!r} must be dimensionless.".format( + term, coord.name() ) raise ValueError(msg) diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index b26e0f1763..d6fca461d1 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -72,16 +72,16 @@ def func(): ndargs = len(dargs) if ndargs: - assert ( - ndargs == 1 - ), f"Invalid lenient client arguments, expecting 1 got {ndargs}." - assert callable( - dargs[0] - ), "Invalid lenient client argument, expecting a callable." + assert ndargs == 1, ( + f"Invalid lenient client arguments, expecting 1 got {ndargs}." + ) + assert callable(dargs[0]), ( + "Invalid lenient client argument, expecting a callable." + ) - assert not ( - ndargs and services - ), "Invalid lenient client, got both arguments and keyword arguments." + assert not (ndargs and services), ( + "Invalid lenient client, got both arguments and keyword arguments." + ) if ndargs: # The decorator has been used as a simple naked decorator. @@ -164,12 +164,12 @@ def func(): ndargs = len(dargs) if ndargs: - assert ( - ndargs == 1 - ), f"Invalid lenient service arguments, expecting 1 got {ndargs}." - assert callable( - dargs[0] - ), "Invalid lenient service argument, expecting a callable." + assert ndargs == 1, ( + f"Invalid lenient service arguments, expecting 1 got {ndargs}." + ) + assert callable(dargs[0]), ( + "Invalid lenient service argument, expecting a callable." + ) if ndargs: # The decorator has been used as a simple naked decorator. diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 4da46ae249..a8f78b0ebf 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -769,8 +769,7 @@ def as_cartopy_crs(self): globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) warnings.warn( - "Discarding false_easting and false_northing that are " - "not used by Cartopy.", + "Discarding false_easting and false_northing that are not used by Cartopy.", category=iris.warnings.IrisDefaultingWarning, ) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 06a271cbba..029d2c603e 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2253,8 +2253,7 @@ def _guess_bounds(self, bound_position=0.5, monthly=False, yearly=False): if self.has_bounds(): raise ValueError( - "Coord already has bounds. Remove the bounds " - "before guessing new ones." 
+ "Coord already has bounds. Remove the bounds before guessing new ones." ) if monthly or yearly: @@ -2446,8 +2445,7 @@ def nearest_neighbour_index(self, point): bounds = self.bounds if self.has_bounds() else np.array([]) if self.ndim != 1: raise ValueError( - "Nearest-neighbour is currently limited" - " to one-dimensional coordinates." + "Nearest-neighbour is currently limited to one-dimensional coordinates." ) do_circular = getattr(self, "circular", False) if do_circular: diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 30ac3432b7..e84ff202b9 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1546,9 +1546,8 @@ def _check_multi_dim_metadata( if data_dims: if len(data_dims) != metadata.ndim: - msg = ( - "Invalid data dimensions: {} given, {} expected for " - "{!r}.".format(len(data_dims), metadata.ndim, metadata.name()) + msg = "Invalid data dimensions: {} given, {} expected for {!r}.".format( + len(data_dims), metadata.ndim, metadata.name() ) raise iris.exceptions.CannotAddError(msg) # Check compatibility with the shape of the data @@ -2401,8 +2400,7 @@ def coord( bad_name = _name or standard_name or long_name or "" emsg = ( - f"Expected to find exactly 1 {bad_name!r} coordinate, " - "but found none." + f"Expected to find exactly 1 {bad_name!r} coordinate, but found none." ) raise iris.exceptions.CoordinateNotFoundError(emsg) @@ -2615,8 +2613,7 @@ def cell_measure( if len(cell_measures) > 1: msg = ( - "Expected to find exactly 1 cell_measure, but found {}. " - "They were: {}." + "Expected to find exactly 1 cell_measure, but found {}. They were: {}." ) msg = msg.format( len(cell_measures), @@ -2635,8 +2632,7 @@ def cell_measure( ) raise iris.exceptions.CellMeasureNotFoundError(emsg) msg = ( - f"Expected to find exactly 1 {bad_name!r} cell measure, " - "but found none." + f"Expected to find exactly 1 {bad_name!r} cell measure, but found none." ) raise iris.exceptions.CellMeasureNotFoundError(msg) diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 4ffad43a2c..9ad93f83b6 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -284,8 +284,7 @@ def __init__(self, src_cube, tgt_grid_cube, method, projection=None): ) if src_x_coord.coord_system is None: raise ValueError( - "'src_cube' lateral geographic coordinates have " - "no coordinate system." + "'src_cube' lateral geographic coordinates have no coordinate system." ) tgt_x_coord, tgt_y_coord = get_xy_dim_coords(tgt_grid_cube) if tgt_x_coord.coord_system != tgt_y_coord.coord_system: diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index 174c4d390c..6729141bf6 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -62,7 +62,7 @@ def _load_grib(*args, **kwargs): from iris_grib import load_cubes except ImportError: raise RuntimeError( - "Unable to load GRIB file - " '"iris_grib" package is not installed.' + 'Unable to load GRIB file - "iris_grib" package is not installed.' 
) return load_cubes(*args, **kwargs) diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index b6b45b8b3c..6976b775e1 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -363,8 +363,9 @@ def __init__(self, filename, word_depth=DEFAULT_FF_WORD_DEPTH): res = res.reshape((addr[1], addr[2]), order="F") else: raise ValueError( - "ff header element {} is not" - "handled correctly".format(elem) + "ff header element {} is nothandled correctly".format( + elem + ) ) else: res = None @@ -809,8 +810,7 @@ def _extract_field(self): yield result_field except ValueError as valerr: msg = ( - "Input field skipped as PPField creation failed :" - " error = {!r}" + "Input field skipped as PPField creation failed : error = {!r}" ) warnings.warn(msg.format(str(valerr)), category=IrisLoadWarning) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 1611ef7160..c65483a908 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -76,8 +76,7 @@ def _default_rulenamesfunc(func_name): funcname_prefix = "action_" rulename_prefix = "fc_" # To match existing behaviours rule_name = func_name - if rule_name.startswith(funcname_prefix): - rule_name = rule_name[len(funcname_prefix) :] + rule_name = rule_name.removeprefix(funcname_prefix) if not rule_name.startswith(rulename_prefix): rule_name = rulename_prefix + rule_name return rule_name diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index faf40ad210..bd9d625b8f 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -311,8 +311,7 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: bracket_depth -= 1 if bracket_depth < 0: msg = ( - "Cell methods may be incorrectly parsed due to mismatched " - "brackets" + "Cell methods may be incorrectly parsed due to mismatched brackets" ) warnings.warn( msg, diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 8405368ade..3c37395f6c 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -117,7 +117,7 @@ def save_png(source, target, launch=False): # Create png data if not _dot_path(): raise ValueError( - 'Executable "dot" not found: ' "Review dot_path setting in site.cfg." + 'Executable "dot" not found: Review dot_path setting in site.cfg.' ) # To filename or open file handle? if isinstance(target, str): diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 8aa551be57..fa434bd439 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -672,8 +672,7 @@ def _coerce_value(val_attr, val_attr_value, data_dtype): or container.attributes.get("valid_max") is not None ) and container.attributes.get("valid_range") is not None: msg = ( - 'Both "valid_range" and "valid_min" or "valid_max" ' - "attributes present." + 'Both "valid_range" and "valid_min" or "valid_max" attributes present.' ) raise ValueError(msg) diff --git a/lib/iris/fileformats/netcdf/ugrid_load.py b/lib/iris/fileformats/netcdf/ugrid_load.py index 0a70567f16..b1d9eb36a2 100644 --- a/lib/iris/fileformats/netcdf/ugrid_load.py +++ b/lib/iris/fileformats/netcdf/ugrid_load.py @@ -426,8 +426,7 @@ def _build_mesh_coords(mesh, cf_var): # We should probably issue warnings and recover, but that is too much # work. Raising a more intelligible error is easy to do though. 
msg = ( - f"mesh data variable {cf_var.name!r} has an invalid " - f"location={location!r}." + f"mesh data variable {cf_var.name!r} has an invalid location={location!r}." ) raise ValueError(msg) mesh_dim_name = element_dimensions.get(location) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 3b38304f00..5f3b74de52 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -304,7 +304,7 @@ def from_msi(msi): if msi_match is None: raise ValueError( - 'Expected STASH code MSI string "mXXsXXiXXX", ' "got %r" % (msi,) + 'Expected STASH code MSI string "mXXsXXiXXX", got %r' % (msi,) ) return STASH(*msi_match.groups()) diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 50f0f3c4e6..4e32ebf20a 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -162,8 +162,7 @@ def expand_filespecs(file_specs, files_expected=True): """ # Remove any hostname component - currently unused filenames = [ - os.path.abspath(os.path.expanduser(fn[2:] if fn.startswith("//") else fn)) - for fn in file_specs + os.path.abspath(os.path.expanduser(fn.removeprefix("//"))) for fn in file_specs ] if files_expected: @@ -301,7 +300,7 @@ def _grib_save(cube, target, append=False, **kwargs): from iris_grib import save_grib2 except ImportError: raise RuntimeError( - "Unable to save GRIB file - " '"iris_grib" package is not installed.' + 'Unable to save GRIB file - "iris_grib" package is not installed.' ) save_grib2(cube, target, append, **kwargs) diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index 0cf7a035be..fd2d2ed139 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -90,9 +90,9 @@ def izip(*cubes, **kwargs): if len(requested_dims) != sum( (len(cube.coord_dims(coord)) for coord in coords) ): - msg = ( - "The requested coordinates (%r) of cube (%r) are not " - "orthogonal." % ([coord.name() for coord in coords], cube) + msg = "The requested coordinates (%r) of cube (%r) are not orthogonal." % ( + [coord.name() for coord in coords], + cube, ) raise ValueError(msg) diff --git a/lib/iris/mesh/components.py b/lib/iris/mesh/components.py index ef7b7c3575..5c9ffefc3e 100644 --- a/lib/iris/mesh/components.py +++ b/lib/iris/mesh/components.py @@ -180,8 +180,7 @@ def __init__( def validate_arg_vs_list(arg_name, arg, valid_list): if arg not in valid_list: error_msg = ( - f"Invalid {arg_name} . Got: {arg} . Must be one of: " - f"{valid_list} ." + f"Invalid {arg_name} . Got: {arg} . Must be one of: {valid_list} ." ) raise ValueError(error_msg) @@ -847,7 +846,7 @@ def check_shape(array_name): raise ValueError(message) shapes = [array.shape for array in arrays] if shapes.count(shapes[0]) != len(shapes): - message = f"{array_name} shapes are not identical for all " f"coords." + message = f"{array_name} shapes are not identical for all coords." raise ValueError(message) for array in ("points", "bounds"): @@ -865,7 +864,7 @@ def check_shape(array_name): bounds_dim1 = bounds_shape[1] if bounds_dim1 < 2: message = ( - f"Expected coordinate bounds.shape (n, >" f"=2), got: {bounds_shape} ." + f"Expected coordinate bounds.shape (n, >=2), got: {bounds_shape} ." ) raise ValueError(message) elif bounds_dim1 == 2: @@ -2450,8 +2449,7 @@ def add(self, *connectivities): # Check is list values are identical. if not counts.count(counts[0]) == len(counts): message = ( - f"Invalid Connectivities provided - inconsistent " - f"{element} counts." + f"Invalid Connectivities provided - inconsistent {element} counts." 
) raise ValueError(message) @@ -2477,8 +2475,7 @@ def filter(self, **kwargs): _name = item.name() bad_name = _name or kwargs["standard_name"] or kwargs["long_name"] or "" message = ( - f"Expected to find exactly 1 {bad_name} connectivity, " - f"but found none." + f"Expected to find exactly 1 {bad_name} connectivity, but found none." ) raise ConnectivityNotFoundError(message) @@ -2700,9 +2697,7 @@ def __init__( # Validate and record the class-specific constructor args. if not isinstance(mesh, MeshXY): msg = ( # type: ignore[unreachable] - "'mesh' must be an " - f"{MeshXY.__module__}.{MeshXY.__name__}, " - f"got {mesh}." + f"'mesh' must be an {MeshXY.__module__}.{MeshXY.__name__}, got {mesh}." ) raise TypeError(msg) # Handled as a readonly ".mesh" property. diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 7d9812d11c..789d7fbf86 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -1932,13 +1932,13 @@ def update_animation_iris(i, cubes, vmin, vmax, coords): supported = ["iris.plot", "iris.quickplot"] if plot_func.__module__ not in supported: - msg = 'Given plotting module "{}" may not be supported, intended ' "use: {}." + msg = 'Given plotting module "{}" may not be supported, intended use: {}.' msg = msg.format(plot_func.__module__, supported) warnings.warn(msg, category=IrisUnsupportedPlottingWarning) supported = ["contour", "contourf", "pcolor", "pcolormesh"] if plot_func.__name__ not in supported: - msg = 'Given plotting function "{}" may not be supported, intended ' "use: {}." + msg = 'Given plotting function "{}" may not be supported, intended use: {}.' msg = msg.format(plot_func.__name__, supported) warnings.warn(msg, category=IrisUnsupportedPlottingWarning) diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index a2824f90c2..142b26f725 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -339,8 +339,7 @@ def assertCMLApproxData(self, cubes, reference_filename=None, **kwargs): for i, cube in enumerate(cubes): fname = list(reference_filename) # don't want the ".cml" for the json stats file - if fname[-1].endswith(".cml"): - fname[-1] = fname[-1][:-4] + fname[-1] = fname[-1].removesuffix(".cml") fname[-1] += ".data.%d.json" % i self.assertDataAlmostEqual(cube.data, fname, **kwargs) self.assertCML(cubes, reference_filename, checksum=False) @@ -1033,7 +1032,7 @@ class MyGeoTiffTests(test.IrisTest): skip_sample_data = unittest.skipIf( not SAMPLE_DATA_AVAILABLE, - ('Test(s) require "iris-sample-data", ' "which is not available."), + ('Test(s) require "iris-sample-data", which is not available.'), ) @@ -1045,7 +1044,7 @@ class MyGeoTiffTests(test.IrisTest): skip_inet = unittest.skipIf( not INET_AVAILABLE, - ('Test(s) require an "internet connection", ' "which is not available."), + ('Test(s) require an "internet connection", which is not available.'), ) diff --git a/lib/iris/tests/_shared_utils.py b/lib/iris/tests/_shared_utils.py index 4a0d275cdd..4b8d424c41 100644 --- a/lib/iris/tests/_shared_utils.py +++ b/lib/iris/tests/_shared_utils.py @@ -154,9 +154,7 @@ def _assert_str_same( 0, ) ) - fail_string = ( - f"{type_comparison_name} do not match: {reference_filename}\n" f"{diff}" - ) + fail_string = f"{type_comparison_name} do not match: {reference_filename}\n{diff}" assert reference_str == test_str, fail_string @@ -214,8 +212,7 @@ def _check_for_request_fixture(request, func_name: str): """ if not hasattr(request, "fixturenames"): message = ( - f"{func_name}() expected: pytest.FixtureRequest instance, got: " - f"{request}" + f"{func_name}() 
expected: pytest.FixtureRequest instance, got: {request}" ) raise ValueError(message) @@ -307,8 +304,7 @@ def assert_CML_approx_data( for i, cube in enumerate(cubes): fname = list(reference_filename) # don't want the ".cml" for the json stats file - if fname[-1].endswith(".cml"): - fname[-1] = fname[-1][:-4] + fname[-1] = fname[-1].removesuffix(".cml") fname[-1] += ".data.%d.json" % i assert_data_almost_equal(cube.data, fname, **kwargs) assert_CML(request, cubes, reference_filename, checksum=False) @@ -448,7 +444,7 @@ def assert_text_file(source_filename, reference_filename, desc="text file"): ) ) fail_string = ( - f"{desc} does not match: reference file " f"{reference_filename} \n {diff}" + f"{desc} does not match: reference file {reference_filename} \n {diff}" ) assert reference_text == source_text, fail_string @@ -924,7 +920,7 @@ class MyGeoTiffTests(test.IrisTest): skip_sample_data = pytest.mark.skipif( not SAMPLE_DATA_AVAILABLE, - reason=('Test(s) require "iris-sample-data", ' "which is not available."), + reason=('Test(s) require "iris-sample-data", which is not available.'), ) @@ -936,7 +932,7 @@ class MyGeoTiffTests(test.IrisTest): skip_inet = pytest.mark.skipif( not INET_AVAILABLE, - reason=('Test(s) require an "internet connection", ' "which is not available."), + reason=('Test(s) require an "internet connection", which is not available.'), ) diff --git a/lib/iris/tests/integration/netcdf/test_general.py b/lib/iris/tests/integration/netcdf/test_general.py index 88d636a315..fbe7aaa391 100644 --- a/lib/iris/tests/integration/netcdf/test_general.py +++ b/lib/iris/tests/integration/netcdf/test_general.py @@ -145,8 +145,7 @@ def test_unknown_method(self): self.assertEqual(len(warning_messages), 1) message = warning_messages[0].args[0] msg = ( - "NetCDF variable 'odd_phenomenon' contains unknown cell " - "method 'oddity'" + "NetCDF variable 'odd_phenomenon' contains unknown cell method 'oddity'" ) self.assertIn(msg, message) finally: diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py index e02e51db78..da36b56835 100644 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ b/lib/iris/tests/integration/plot/test_colorbar.py @@ -44,18 +44,18 @@ def test_common_draw_functions(self): for draw_function in self.draw_functions: mappable = draw_function(self.cube) cbar = plt.colorbar() - assert ( - cbar.mappable is mappable - ), "Problem with draw function iris.plot.{}".format(draw_function.__name__) + assert cbar.mappable is mappable, ( + "Problem with draw function iris.plot.{}".format(draw_function.__name__) + ) def test_common_draw_functions_specified_mappable(self): for draw_function in self.draw_functions: mappable_initial = draw_function(self.cube, cmap="cool") _ = draw_function(self.cube) cbar = plt.colorbar(mappable_initial) - assert ( - cbar.mappable is mappable_initial - ), "Problem with draw function iris.plot.{}".format(draw_function.__name__) + assert cbar.mappable is mappable_initial, ( + "Problem with draw function iris.plot.{}".format(draw_function.__name__) + ) def test_points_with_c_kwarg(self): mappable = points(self.cube, c=self.cube.data) diff --git a/lib/iris/tests/integration/test_mask_cube_from_shapefile.py b/lib/iris/tests/integration/test_mask_cube_from_shapefile.py index 59f3e3a72a..52fd02615d 100644 --- a/lib/iris/tests/integration/test_mask_cube_from_shapefile.py +++ b/lib/iris/tests/integration/test_mask_cube_from_shapefile.py @@ -39,9 +39,9 @@ def test_global_proj_russia(self): ][0] masked_test = 
mask_cube_from_shapefile(test_global, ne_russia) print(np.sum(masked_test.data)) - assert math.isclose( - np.sum(masked_test.data), 76845.37, rel_tol=0.001 - ), "Global data with Russia mask failed test" + assert math.isclose(np.sum(masked_test.data), 76845.37, rel_tol=0.001), ( + "Global data with Russia mask failed test" + ) def test_rotated_pole_proj_germany(self): path = tests.get_data_path( @@ -54,9 +54,9 @@ def test_rotated_pole_proj_germany(self): if "Germany" in country.attributes["NAME_LONG"] ][0] masked_test = mask_cube_from_shapefile(test_rotated, ne_germany) - assert math.isclose( - np.sum(masked_test.data), 179.46872, rel_tol=0.001 - ), "rotated europe data with German mask failed test" + assert math.isclose(np.sum(masked_test.data), 179.46872, rel_tol=0.001), ( + "rotated europe data with German mask failed test" + ) def test_transverse_mercator_proj_uk(self): path = tests.get_data_path( @@ -69,9 +69,9 @@ def test_transverse_mercator_proj_uk(self): if "United Kingdom" in country.attributes["NAME_LONG"] ][0] masked_test = mask_cube_from_shapefile(test_transverse, ne_uk) - assert math.isclose( - np.sum(masked_test.data), 90740.25, rel_tol=0.001 - ), "transverse mercator UK data with UK mask failed test" + assert math.isclose(np.sum(masked_test.data), 90740.25, rel_tol=0.001), ( + "transverse mercator UK data with UK mask failed test" + ) def test_rotated_pole_proj_germany_weighted_area(self): path = tests.get_data_path( @@ -86,9 +86,9 @@ def test_rotated_pole_proj_germany_weighted_area(self): masked_test = mask_cube_from_shapefile( test_rotated, ne_germany, minimum_weight=0.9 ) - assert math.isclose( - np.sum(masked_test.data), 125.60199, rel_tol=0.001 - ), "rotated europe data with 0.9 weight germany mask failed test" + assert math.isclose(np.sum(masked_test.data), 125.60199, rel_tol=0.001), ( + "rotated europe data with 0.9 weight germany mask failed test" + ) def test_4d_global_proj_brazil(self): path = tests.get_data_path(["NetCDF", "global", "xyz_t", "GEMS_CO2_Apr2006.nc"]) @@ -104,6 +104,6 @@ def test_4d_global_proj_brazil(self): ) print(np.sum(masked_test.data)) # breakpoint() - assert math.isclose( - np.sum(masked_test.data), 18616921.2, rel_tol=0.001 - ), "4d data with brazil mask failed test" + assert math.isclose(np.sum(masked_test.data), 18616921.2, rel_tol=0.001), ( + "4d data with brazil mask failed test" + ) diff --git a/lib/iris/tests/stock/_stock_2d_latlons.py b/lib/iris/tests/stock/_stock_2d_latlons.py index 018b64a03a..8eb862dd22 100644 --- a/lib/iris/tests/stock/_stock_2d_latlons.py +++ b/lib/iris/tests/stock/_stock_2d_latlons.py @@ -83,9 +83,7 @@ def grid_coords_2d_from_1d(x_coord_1d, y_coord_1d): """ for coord in (x_coord_1d, y_coord_1d): if coord.ndim != 1: - msg = ( - "Input coords must be one-dimensional. " 'Coordinate "{}" has shape {}.' - ) + msg = 'Input coords must be one-dimensional. Coordinate "{}" has shape {}.' raise ValueError(msg.format(coord.name(), coord.shape)) # Calculate centre-points as a mesh of the 2 inputs. diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 9bb26a426a..b10f96140f 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -172,13 +172,13 @@ class _WarnComboCfDefaulting(IrisCfWarning, IrisDefaultingWarning): warns_with_user_warning.append(warn_ref) # This avoids UserWarnings being raised by unwritten default behaviour. 
- assert ( - warns_without_category == [] - ), "All warnings raised by Iris must be raised with the category kwarg." + assert warns_without_category == [], ( + "All warnings raised by Iris must be raised with the category kwarg." + ) - assert ( - warns_with_user_warning == [] - ), "No warnings raised by Iris can be the base UserWarning class." + assert warns_with_user_warning == [], ( + "No warnings raised by Iris can be the base UserWarning class." + ) class TestLicenseHeaders(tests.IrisTest): diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index f68a9cf32a..1cfaaf09f6 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -817,8 +817,7 @@ def run_tests_1d(self, cube, results): self.check_graphic() except AssertionError as err: msg = ( - "Draw method {!r} failed with coords: {!r}. " - "Assertion message: {!s}" + "Draw method {!r} failed with coords: {!r}. Assertion message: {!s}" ) self.fail(msg.format(draw_method, rcoords, err)) diff --git a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py index 1016844a7f..f6324d1948 100644 --- a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py +++ b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py @@ -43,12 +43,10 @@ def test_bad_grid_type(self): class Test___call__(tests.IrisTest): def setUp(self): self.func_setup = ( - "iris.analysis._regrid." - "_regrid_weighted_curvilinear_to_rectilinear__prepare" + "iris.analysis._regrid._regrid_weighted_curvilinear_to_rectilinear__prepare" ) self.func_operate = ( - "iris.analysis._regrid." - "_regrid_weighted_curvilinear_to_rectilinear__perform" + "iris.analysis._regrid._regrid_weighted_curvilinear_to_rectilinear__perform" ) # Define a test source grid and target grid, basically the same. self.src_grid = global_pp() diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py index 72218af830..c1d3996309 100644 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ b/lib/iris/tests/unit/analysis/test_PERCENTILE.py @@ -216,8 +216,7 @@ def test_masked(self): data = ma.arange(np.prod(shape)).reshape(shape) data[0, ::2] = ma.masked emsg = ( - "Cannot use fast np.percentile method with masked array unless " - "mdtol is 0." + "Cannot use fast np.percentile method with masked array unless mdtol is 0." ) with self.assertRaisesRegex(TypeError, emsg): PERCENTILE.aggregate(data, axis=0, percent=50, fast_percentile_method=True) @@ -324,8 +323,7 @@ def test_masked(self): data, axis=0, percent=50, fast_percentile_method=True ) emsg = ( - "Cannot use fast np.percentile method with masked array unless " - "mdtol is 0." + "Cannot use fast np.percentile method with masked array unless mdtol is 0." 
) with self.assertRaisesRegex(TypeError, emsg): as_concrete_data(actual) diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py index 64246261ca..bdbd053b56 100644 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py @@ -514,10 +514,7 @@ def test_dates_scalar(self): "" ), - ( - "AuxCoord : time / (hours since 2025-03-23 01:00:00, " - "standard calendar)" - ), + ("AuxCoord : time / (hours since 2025-03-23 01:00:00, standard calendar)"), " points: [2025-03-23 01:00:00]", " shape: (1,)", " dtype: float64", @@ -1019,10 +1016,7 @@ def test_convert_dates(self): expected = [ "AuxCoord : x / (days since 1970-03-5, standard calendar)", " points: [", - ( - " 1970-03-05 00:00:00, 1970-03-06 00:00:00, " - "1970-03-07 00:00:00," - ), + (" 1970-03-05 00:00:00, 1970-03-06 00:00:00, 1970-03-07 00:00:00,"), " 1970-03-08 00:00:00, 1970-03-09 00:00:00]", " shape: (5,)", " dtype: float64", diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py index 4ba65913c6..943a3268fa 100644 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -208,9 +208,7 @@ def test__str__(self, sample_attrs): def test__repr__(self, sample_attrs): result = repr(sample_attrs) expected = ( - "CubeAttrsDict(" - "globals={'b': 2, 'z': 'that'}, " - "locals={'a': 1, 'z': 'this'})" + "CubeAttrsDict(globals={'b': 2, 'z': 'that'}, locals={'a': 1, 'z': 'this'})" ) assert result == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 4e58c3b1d3..24c5ebfa6d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -140,7 +140,7 @@ def _make_testcase_cdl( else: phenom_coords_string = " ".join(phenom_coords) phenom_coords_string = ( - " " f'phenom:coordinates = "{phenom_coords_string}" ; ' + f' phenom:coordinates = "{phenom_coords_string}" ; ' ) # Create a testcase with time dims + coords. 
diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index 66d3ffb7e4..f0dd80de85 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -116,7 +116,7 @@ def test_invalid_scale_factor_and_standard_parallel(self): self.assertEqual(len(warns), 1) self.assertRegex( str(warns[0]), - "both " '"scale_factor_at_projection_origin" and "standard_parallel"', + 'both "scale_factor_at_projection_origin" and "standard_parallel"', ) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index a7dc5bd029..8ced149ff1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -159,7 +159,7 @@ def test_invalid_scale_factor_and_standard_parallel(self): self.assertEqual(len(warns), 1) self.assertRegex( str(warns[0]), - "both " '"scale_factor_at_projection_origin" and "standard_parallel"', + 'both "scale_factor_at_projection_origin" and "standard_parallel"', ) def test_absent_scale_factor_and_standard_parallel(self): diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/ugrid_load/test_meshload_checks.py b/lib/iris/tests/unit/fileformats/netcdf/loader/ugrid_load/test_meshload_checks.py index ed98c8c3d4..3b9d1652f6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/ugrid_load/test_meshload_checks.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/ugrid_load/test_meshload_checks.py @@ -61,7 +61,7 @@ def test_extrameshvar__fail(self, failnc): param = self.param if param == "nolocation": match_msg = ( - "mesh data variable 'extra_data' has an " "invalid location=''." + "mesh data variable 'extra_data' has an invalid location=''." ) elif param == "badlocation": match_msg = ( diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 7508376840..d73baa228f 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -936,7 +936,7 @@ def _namestext(names): f'{title}="{name}"' for title, name in zip(("standard", "long", "var"), names) ] - return f'({" ".join(name_texts)})' + return f"({' '.join(name_texts)})" def test_mesh_names(self): # Check the selection of mesh-variables names. 
diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index 7d90903304..e6e9359c26 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -29,14 +29,14 @@ def _check_arrays_and_dims(self, result, spec): self.assertEqual( result_dims, spec_dims, - 'element dims differ for "{}": ' "result={!r}, expected {!r}".format( + 'element dims differ for "{}": result={!r}, expected {!r}'.format( keyname, result_dims, spec_dims ), ) self.assertArrayEqual( result_array, spec_array, - 'element arrays differ for "{}": ' "result={!r}, expected {!r}".format( + 'element arrays differ for "{}": result={!r}, expected {!r}'.format( keyname, result_array, spec_array ), ) diff --git a/lib/iris/tests/unit/mesh/components/test_MeshXY.py b/lib/iris/tests/unit/mesh/components/test_MeshXY.py index c1977633e2..abb294f8d8 100644 --- a/lib/iris/tests/unit/mesh/components/test_MeshXY.py +++ b/lib/iris/tests/unit/mesh/components/test_MeshXY.py @@ -666,8 +666,7 @@ def test__str__longstringattribute(self): result = str(self.mesh) # Note: initial single-quote, but no final one : this is correct ! expected = ( - "'long_x_10_long_x_20_long_x_30_long_x_40_" - "long_x_50_long_x_60_long_x_70..." + "'long_x_10_long_x_20_long_x_30_long_x_40_long_x_50_long_x_60_long_x_70..." ) self.assertIn(expected + ":END", result + ":END") diff --git a/lib/iris/tests/unit/mesh/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/mesh/utils/test_recombine_submeshes.py index 5323dd5883..8e692140b6 100644 --- a/lib/iris/tests/unit/mesh/utils/test_recombine_submeshes.py +++ b/lib/iris/tests/unit/mesh/utils/test_recombine_submeshes.py @@ -336,7 +336,7 @@ def test_fail_dtype_mismatch_region_regions(self): def test_fail_dimcoord_sub_no_mesh(self): self.mesh_cube.remove_coord("level") - msg = 'has a dim-coord "level" for dimension 0, ' "but 'mesh_cube' has none." + msg = "has a dim-coord \"level\" for dimension 0, but 'mesh_cube' has none." 
with self.assertRaisesRegex(ValueError, msg): recombine_submeshes(self.mesh_cube, self.region_cubes) diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index 31c7e68bf8..2eae1249a7 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -34,7 +34,7 @@ skip_pandas = pytest.mark.skipif( pd is None, - reason='Test(s) require "pandas", ' "which is not available.", + reason='Test(s) require "pandas", which is not available.', ) if pd is not None: diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py index bdd5c5fc56..0bd2afda21 100644 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py @@ -40,35 +40,35 @@ def _setup(self): def test_basic_cube_intersect(self): shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) masked_cube = mask_cube_from_shapefile(self.basic_cube, shape) - assert ( - np.sum(masked_cube.data) == 8 - ), f"basic cube masking failed test - expected 8 got {np.sum(masked_cube.data)}" + assert np.sum(masked_cube.data) == 8, ( + f"basic cube masking failed test - expected 8 got {np.sum(masked_cube.data)}" + ) def test_basic_cube_intersect_in_place(self): shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) cube = self.basic_cube.copy() mask_cube_from_shapefile(cube, shape, in_place=True) - assert ( - np.sum(cube.data) == 8 - ), f"basic cube masking failed test - expected 8 got {np.sum(cube.data)}" + assert np.sum(cube.data) == 8, ( + f"basic cube masking failed test - expected 8 got {np.sum(cube.data)}" + ) def test_basic_cube_intersect_low_weight(self): shape = shapely.geometry.box(0.1, 0.6, 1, 1) masked_cube = mask_cube_from_shapefile( self.basic_cube, shape, minimum_weight=0.2 ) - assert ( - np.sum(masked_cube.data) == 12 - ), f"basic cube masking weighting failed test - expected 12 got {np.sum(masked_cube.data)}" + assert np.sum(masked_cube.data) == 12, ( + f"basic cube masking weighting failed test - expected 12 got {np.sum(masked_cube.data)}" + ) def test_basic_cube_intersect_high_weight(self): shape = shapely.geometry.box(0.1, 0.6, 1, 1) masked_cube = mask_cube_from_shapefile( self.basic_cube, shape, minimum_weight=0.7 ) - assert ( - np.sum(masked_cube.data) == 8 - ), f"basic cube masking weighting failed test- expected 8 got {np.sum(masked_cube.data)}" + assert np.sum(masked_cube.data) == 8, ( + f"basic cube masking weighting failed test- expected 8 got {np.sum(masked_cube.data)}" + ) def test_cube_list_error(self): cubelist = iris.cube.CubeList([self.basic_cube]) @@ -88,9 +88,9 @@ def test_line_shape_warning(self): masked_cube = mask_cube_from_shapefile( self.basic_cube, shape, minimum_weight=0.1 ) - assert ( - np.sum(masked_cube.data) == 24 - ), f"basic cube masking against line failed test - expected 24 got {np.sum(masked_cube.data)}" + assert np.sum(masked_cube.data) == 24, ( + f"basic cube masking against line failed test - expected 24 got {np.sum(masked_cube.data)}" + ) def test_cube_coord_mismatch_warning(self): shape = shapely.geometry.box(0.6, 0.6, 0.9, 0.9) diff --git a/lib/iris/util.py b/lib/iris/util.py index 9681ab1484..dfefb504e9 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -592,15 +592,13 @@ def reverse(cube_or_array, coords_or_dims): axes.update(cube_or_array.coord_dims(coord_or_dim)) except AttributeError: raise TypeError( - "coords_or_dims must be int, str, coordinate " - "or sequence of these." 
+ "coords_or_dims must be int, str, coordinate or sequence of these." ) axes = np.array(list(axes), ndmin=1) if axes.ndim != 1 or axes.size == 0: raise ValueError( - "Reverse was expecting a single axis or a 1d array " - "of axes, got %r" % axes + "Reverse was expecting a single axis or a 1d array of axes, got %r" % axes ) if np.min(axes) < 0 or np.max(axes) > cube_or_array.ndim - 1: raise ValueError( @@ -945,8 +943,7 @@ def __new__(cls, name, bases, namespace): if "_init" not in namespace: # Create a default _init method for the class method_source = ( - "def _init(self, %s):\n " - "self._init_from_tuple((%s,))" % (args, args) + "def _init(self, %s):\n self._init_from_tuple((%s,))" % (args, args) ) exec(method_source, namespace) @@ -1647,10 +1644,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): return if aux_coord not in cube.aux_coords: - msg = ( - "Attempting to promote an AuxCoord ({}) " - "which does not exist in the cube." - ) + msg = "Attempting to promote an AuxCoord ({}) which does not exist in the cube." msg = msg.format(aux_coord.name()) raise ValueError(msg) From 11f6e55d4af364a54b77af43a0dbc3fca3ab5b11 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 06:19:18 +0000 Subject: [PATCH 52/74] Bump scitools/workflows from 2025.01.1 to 2025.01.3 (#6287) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2025.01.1 to 2025.01.3. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2025.01.1...2025.01.3) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 14eb2661cf..573c6f55c6 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.1 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.3 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 244ceffabf..49fc27e72a 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.1 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.3 secrets: inherit From e5acb501ce4a28ce7a8d667fba8ee91c66bffcb0 Mon Sep 17 00:00:00 2001 From: Francesco Nattino <49899980+fnattino@users.noreply.github.com> Date: Fri, 17 Jan 2025 17:13:36 +0100 Subject: [PATCH 53/74] Lazy median aggregator (#6167) * add lazy median * add tests * add what's new entry * move tests to pytest --------- Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 9 ++- lib/iris/analysis/__init__.py | 20 ++++- lib/iris/tests/unit/analysis/test_MEDIAN.py | 90 +++++++++++++++++++++ 3 files changed, 114 insertions(+), 5 deletions(-) create mode 100644 lib/iris/tests/unit/analysis/test_MEDIAN.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 
a69de60f95..7e5c50a60e 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -37,6 +37,10 @@ This document explains the changes made to Iris for this release (:issue:`6248`, :pull:`6257`) +#. `@fnattino`_ added the lazy median aggregator :class:`iris.analysis.MEDIAN` + based on the implementation discussed by `@rcomer`_ and `@stefsmeets`_ in + :issue:`4039` (:pull:`6167`). + 🐛 Bugs Fixed ============= @@ -98,8 +102,9 @@ This document explains the changes made to Iris for this release Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: - - +.. _@fnattino: https://github.com/fnattino +.. _@jrackham-mo: https://github.com/jrackham-mo +.. _@stefsmeets: https://github.com/stefsmeets .. comment Whatsnew resources in alphabetical order: \ No newline at end of file diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 2c890ef8cc..708f141de3 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1612,6 +1612,19 @@ def _lazy_max_run(array, axis=-1, **kwargs): return result +def _lazy_median(data, axis=None, **kwargs): + """Calculate the lazy median, with support for masked arrays.""" + # Dask median requires the axes to be explicitly listed. + axis = range(data.ndim) if axis is None else axis + + if np.issubdtype(data, np.integer): + data = data.astype(float) + filled = da.ma.filled(data, np.nan) + result = da.nanmedian(filled, axis=axis, **kwargs) + result_masked = da.ma.fix_invalid(result) + return result_masked + + def _rms(array, axis, **kwargs): rval = np.sqrt(ma.average(array**2, axis=axis, **kwargs)) @@ -1940,7 +1953,9 @@ def interp_order(length): """ -MEDIAN = Aggregator("median", ma.median) +MEDIAN = Aggregator( + "median", ma.median, lazy_func=_build_dask_mdtol_function(_lazy_median) +) """ An :class:`~iris.analysis.Aggregator` instance that calculates the median over a :class:`~iris.cube.Cube`, as computed by @@ -1953,8 +1968,7 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.MEDIAN) -This aggregator handles masked data, but NOT lazy data. For lazy aggregation, -please try :obj:`~.PERCENTILE`. +This aggregator handles masked data and lazy data. """ diff --git a/lib/iris/tests/unit/analysis/test_MEDIAN.py b/lib/iris/tests/unit/analysis/test_MEDIAN.py new file mode 100644 index 0000000000..20b781b48b --- /dev/null +++ b/lib/iris/tests/unit/analysis/test_MEDIAN.py @@ -0,0 +1,90 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
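A minimal usage sketch of the lazy MEDIAN aggregation introduced above, assuming Iris with this change applied; the cube, coordinate names and data values below are invented purely for illustration and are not part of the patch:

    >>> import dask.array as da
    >>> import numpy as np
    >>> import iris.coords
    >>> import iris.cube
    >>> from iris.analysis import MEDIAN
    >>> lat = iris.coords.DimCoord(np.arange(3.0), standard_name="latitude", units="degrees")
    >>> lon = iris.coords.DimCoord(np.arange(4.0), standard_name="longitude", units="degrees")
    >>> cube = iris.cube.Cube(
    ...     da.arange(12.0).reshape(3, 4),  # lazy (dask) payload
    ...     long_name="example_field",
    ...     dim_coords_and_dims=[(lat, 0), (lon, 1)],
    ... )
    >>> result = cube.collapsed("longitude", MEDIAN)
    >>> result.has_lazy_data()  # expected True: the median is now deferred
    True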
+"""Unit tests for the :data:`iris.analysis.MEDIAN` aggregator.""" + +import numpy as np +import numpy.ma as ma + +from iris._lazy_data import ( + as_concrete_data, + as_lazy_data, + is_lazy_data, + is_lazy_masked_data, +) +from iris.analysis import MEDIAN +from iris.tests._shared_utils import assert_array_almost_equal, assert_array_equal + + +def _get_data(lazy=False, masked=False): + data = np.arange(16).reshape((4, 4)) + if masked: + mask = np.eye(4) + data = ma.masked_array(data=data, mask=mask) + if lazy: + data = as_lazy_data(data) + return data + + +class Test_basics: + def setup_method(self): + self.data = _get_data() + + def test_name(self): + assert MEDIAN.name() == "median" + + def test_collapse(self): + data = MEDIAN.aggregate(self.data, axis=(0, 1)) + assert_array_equal(data, [7.5]) + + +class Test_masked: + def setup_method(self): + self.data = _get_data(masked=True) + + def test_output_is_masked(self): + result = MEDIAN.aggregate(self.data, axis=1) + assert ma.isMaskedArray(result) + + def test_median_is_mask_aware(self): + # the median computed along one axis differs if the array is masked + axis = 1 + result = MEDIAN.aggregate(self.data, axis=axis) + data_no_mask = _get_data() + result_no_mask = MEDIAN.aggregate(data_no_mask, axis=axis) + assert not np.allclose(result, result_no_mask) + + +class Test_lazy: + def setup_method(self): + self.data = _get_data(lazy=True) + + def test_output_is_lazy(self): + result = MEDIAN.lazy_aggregate(self.data, axis=(0, 1)) + assert is_lazy_data(result) + + def test_shape(self): + result = MEDIAN.lazy_aggregate(self.data, axis=1) + assert result.shape == (4,) + + def test_result_values(self): + axis = 1 + result = MEDIAN.lazy_aggregate(self.data, axis=axis) + expected = np.median(as_concrete_data(self.data), axis=axis) + assert_array_almost_equal(result, expected) + + +class Test_lazy_masked: + def setup_method(self): + self.data = _get_data(lazy=True, masked=True) + + def test_output_is_lazy_and_masked(self): + result = MEDIAN.lazy_aggregate(self.data, axis=1) + assert is_lazy_masked_data(result) + + def test_result_values(self): + axis = 1 + result = MEDIAN.lazy_aggregate(self.data, axis=axis) + expected = ma.median(as_concrete_data(self.data), axis=axis) + assert_array_almost_equal(result, expected) From 40ee9e742e7ff753598a8c6350485ff0b56cdbc0 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Sat, 18 Jan 2025 06:29:54 +0000 Subject: [PATCH 54/74] Updated environment lockfiles (#6291) Co-authored-by: Lockfile bot --- requirements/locks/py312-linux-64.lock | 142 ++++++++++++------------- 1 file changed, 71 insertions(+), 71 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index bf899e86bd..8abecaca86 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -9,7 +9,7 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed3 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 -https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 
+https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda#dbcace4706afdfb7eb891f7b37d07c04 https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.6-h005c6e1_0.conda#9464e297fa2bf08030c65a54342b48c3 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 @@ -29,6 +29,7 @@ https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_2.conda#04b34b9a40cdc48cfdab261ab176ff74 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 @@ -41,10 +42,11 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/level-zero-1.20.0-h84d6215_0.conda#ea9564ba97545d7f8944632f60e4c1b9 +https://conda.anaconda.org/conda-forge/linux-64/level-zero-1.20.2-h84d6215_0.conda#0dd565af73afda8201b6043ba15d0240 https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 @@ -55,7 +57,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#60 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.48.0-hee588c1_0.conda#84bd1c9a82b455e7a2f390375fb38f90 
https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -63,10 +65,10 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -83,10 +85,10 @@ https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda#c94a5 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 @@ -98,42 +100,26 @@ https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.11.3-he02047a_1. 
https://conda.anaconda.org/conda-forge/linux-64/ocl-icd-2.3.2-hb9d3cd8_2.conda#2e8d2b469559d6b2cb6fd4b34f9c8d7f https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.8-h9e4cc4f_1_cpython.conda#7fd2fd79436d9b473812f14e86746844 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 -https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.48.0-h9eae976_0.conda#2b3a22991c20ed6ea2ed65d3407a91f4 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 -https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c -https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/python-3.12.8-h9e4cc4f_1_cpython.conda#7fd2fd79436d9b473812f14e86746844 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 -https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.1-hb9d3cd8_0.conda#279b0de5f6ba95457190a1c459a64e31 
https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.4-pyhd8ed1ab_1.conda#296b403617bafa89df4971567af79013 https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda#b0b867af6fc74b2a0aa206da29c0f3cf -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda#e83a31202d1c0a000fce3e9cf3825875 https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda#364ba6c9fb03886ac979b482f39ebb92 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 @@ -143,31 +129,29 @@ https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 
https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_1.conda#ae376af0a29183e98a95508ed6944664 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.8-py312h84d6215_0.conda#6713467dc95509683bfa3aca08524e8a +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 -https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a -https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_1.conda#37d1af619d999ee8f1f73cf5a06f4e2f +https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c +https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/loguru-0.7.2-py312h7900ff3_2.conda#fddd3092f921be8e01b18f2a0266d98f https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_1.conda#eb227c3e0bf58f5bd69c0532b157975b https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda#5c9b020a3f86799cdc6115e55df06146 https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_2.conda#5b5e3267d915a107eca793d52e1b780a https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 @@ -195,70 +179,92 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda#ac9 https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d0ed782a8aaa16ef248e68c06c168d https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda#e417822cb989e80a0d2b1b576fdd1657 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda#617f5d608ff8c28ad546e5d9671cbb95 +https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.1-hb9d3cd8_0.conda#279b0de5f6ba95457190a1c459a64e31 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py312h178313f_0.conda#df113f58bdfc79c98f5e07b6bd3eb4c2 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda#6198b134b1c08173f33653896974d477 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py312h178313f_1.conda#bc18c46eda4c2b29431981998507e723 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.1.0-h0b3b770_0.conda#ab1d7d56034814f4c3ed9f69f8c68806 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd 
-https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 +https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py312h7e784f5_0.conda#6159cab400b61f38579a7692be5e630a +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda#d3894405f05b2c0f351d5de3ae26fa9c https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e -https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.1-pyhd8ed1ab_0.conda#de06336c9833cffd2a4bd6f27c4cf8ea +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py312h178313f_0.conda#8219afa093757bbe07b9825eb1973ed9 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda#f5fbba0394ee45e9a64a73c2a994126a +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.2.0-h4bba637_0.conda#9e38e86167e8b1ea0094747d12944ce4 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d 
https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 -https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-hba53ac1_1.conda#f5e75fe79d446bf4975b41d375314605 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.6.0-hac27bb2_3.conda#10ee0153cd8ddc6bd2ec147e7fd56280 -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 +https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py312h7e784f5_0.conda#6159cab400b61f38579a7692be5e630a https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d -https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda#d3894405f05b2c0f351d5de3ae26fa9c https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0.conda#3f62987017ad18e9e7dadce9899de9ef -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.0-py312h180e4f1_1.conda#401e9d25f6ed7d9d9a06da0dca473c3e https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda#f5fbba0394ee45e9a64a73c2a994126a https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 
-https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d +https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-hba53ac1_1.conda#f5e75fe79d446bf4975b41d375314605 +https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.6.0-hac27bb2_3.conda#10ee0153cd8ddc6bd2ec147e7fd56280 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 +https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0.conda#3f62987017ad18e9e7dadce9899de9ef +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.1-py312h180e4f1_0.conda#355bcf0f629159c9bd10a406cd8b6c3a +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 +https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.2-pyhd8ed1ab_0.conda#860b3edb4bee7c76afb03435249e39c2 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.6.0-h4d9b6c2_3.conda#9a3ade47ab98a071c3538246cfc138c2 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.6.0-h4d9b6c2_3.conda#246bbf8c6e41b5ea85b2af7c2c51bda5 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.6.0-h3f63f65_3.conda#0027d0eb0b43817adf23778721fc2156 @@ -273,23 +279,17 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-frontend- https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-frontend-2024.6.0-h5888daf_3.conda#dfbbe82b7a068af10f55b40837e1b942 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda#c27a17a8c54c0d35cf83bbc0de8f7f77 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda#7e41ca6012a6bf609539aec0dfee93f7 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 -https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.2-pyhd8ed1ab_0.conda#860b3edb4bee7c76afb03435249e39c2 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed -https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h099772d_709.conda#e25da7325ba8851b237e5a9c8dfffe32 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-egl_py312hc001bbe_13.conda#559a8d091b4e8806520f7f2f797c66de +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h099772d_709.conda#e25da7325ba8851b237e5a9c8dfffe32 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-egl_py312h3373a60_13.conda#5c0f519bb190b29f9c7a1d5245754685 https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-egl_py312h3373a60_13.conda#65fa8fc7f02e1a50b924da2b2a1dacde From ab2a34726504274d33dab67fcf49bf037ab776ac Mon Sep 17 00:00:00 2001 From: Ruth Comer <10599679+rcomer@users.noreply.github.com> Date: Mon, 20 Jan 2025 12:00:45 +0000 Subject: [PATCH 55/74] Add support for saving string stash codes to PP (#6289) --- docs/src/whatsnew/latest.rst | 3 ++- lib/iris/fileformats/pp_save_rules.py | 3 +++ .../tests/unit/fileformats/pp/test_save.py | 21 +++++++++++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7e5c50a60e..3e0a81f591 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -45,7 +45,8 @@ This document explains the changes made to Iris for this release 🐛 Bugs Fixed ============= -#. N/A +#. `@rcomer`_ added handling for string stash codes when saving pp files. 
+ (:issue:`6239`, :pull:`6289`) 💣 Incompatible Changes diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index b156260f72..5a0f4eabfa 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -13,6 +13,7 @@ from iris.aux_factory import HybridHeightFactory, HybridPressureFactory from iris.fileformats._ff_cross_references import STASH_TRANS from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP +import iris.fileformats.pp from iris.fileformats.rules import ( aux_factory, has_aux_factory, @@ -96,6 +97,8 @@ def _stash_rules(cube, pp): """ if "STASH" in cube.attributes: stash = cube.attributes["STASH"] + if isinstance(stash, str): + stash = iris.fileformats.pp.STASH.from_msi(stash) if isinstance(stash, iris.fileformats.pp.STASH): pp.lbuser[3] = 1000 * (stash.section or 0) + (stash.item or 0) pp.lbuser[6] = stash.model or 0 diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index b7558c4c8a..90e4e15a14 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -55,6 +55,27 @@ def test_realization(): assert member_number == 42 +def test_stash_string(): + cube = stock.lat_lon_cube() + cube.attributes["STASH"] = "m01s34i001" + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: + pp_field.lbuser = list(range(7)) + verify(cube, pp_field) + stash_num = pp_field.lbuser[3] + + assert stash_num == 34001 + + +def test_bad_stash_string(): + cube = stock.lat_lon_cube() + cube.attributes["STASH"] = "ooovarvoo" + with mock.patch("iris.fileformats.pp.PPField3", autospec=True) as pp_field: + with pytest.raises( + ValueError, match='Expected STASH code MSI string "mXXsXXiXXX"' + ): + verify(cube, pp_field) + + def _pp_save_ppfield_values(cube): """Emulate saving a cube as PP, and capture the resulting PP field values.""" # Create a test object to stand in for a real PPField. From fff727e82a404e5d40e33bf5a3bd7b5a3982ff22 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Jan 2025 18:17:30 +0000 Subject: [PATCH 56/74] [pre-commit.ci] pre-commit autoupdate (#6294) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.9.1 → v0.9.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.9.1...v0.9.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index db14e35356..b7c063c8c7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.9.1" + rev: "v0.9.2" hooks: - id: ruff types: [file, python] From 17c874a7227e7e2cc0d2792b0cbd2ea832a6276e Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 27 Jan 2025 11:07:24 +0000 Subject: [PATCH 57/74] Revert "Fix broken link. (#6246)" (#6297) This reverts commit 752dfb8da9768ae4b9538a7c2f9539b22d57ba0f. 
--- docs/src/further_topics/ugrid/other_meshes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/further_topics/ugrid/other_meshes.rst index 8b1c829cf1..19f220be82 100644 --- a/docs/src/further_topics/ugrid/other_meshes.rst +++ b/docs/src/further_topics/ugrid/other_meshes.rst @@ -360,5 +360,5 @@ dimensions into a single mesh dimension. Since Iris cubes don't support a "resh .. _WAVEWATCH III: https://github.com/NOAA-EMC/WW3 -.. _FESOM 1.4: https://www.fesom.de/models/fesom14/ +.. _FESOM 1.4: https://fesom.de/models/fesom14/ .. _NEMO: https://www.nemo-ocean.eu/ \ No newline at end of file From 723f4e5eedc167b0bb848ed1fce4d0ad7920d1ad Mon Sep 17 00:00:00 2001 From: Francesco Nattino <49899980+fnattino@users.noreply.github.com> Date: Mon, 27 Jan 2025 12:19:13 +0100 Subject: [PATCH 58/74] Lazy rectilinear interpolator (#6084) * lazy interpolation using map_complete_blocks * pre-commit fixes * replace test on interpolation with lazy data * Update lib/iris/analysis/_interpolation.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * Update lib/iris/analysis/_interpolation.py Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> * resume local import * add entry to latest.rst * add author name to list * drop duplicated method * new signature of map_complete_blocks * update docstrings on lazy data * update userguide with lazy interpolator * the unstructured NN regridder does not support lazy data * remove caching an interpolator * update what's new entry * remove links to docs section about caching interpolators --------- Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- .../interpolation_and_regridding.rst | 70 ++------ docs/src/whatsnew/latest.rst | 7 + lib/iris/analysis/__init__.py | 8 +- lib/iris/analysis/_interpolation.py | 158 +++++++++++------- .../test_RectilinearInterpolator.py | 27 ++- 5 files changed, 139 insertions(+), 131 deletions(-) diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index 571c43bf0e..4a95276ab2 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -29,9 +29,9 @@ The following are the regridding schemes that are currently available in Iris: * point in cell regridding (:class:`iris.analysis.PointInCell`) and * area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative). -The linear, nearest-neighbor, and area-weighted regridding schemes support -lazy regridding, i.e. if the source cube has lazy data, the resulting cube -will also have lazy data. +The linear and nearest-neighbour interpolation schemes, and the linear, nearest-neighbour, +and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data, +the resulting cube will also have lazy data. See :doc:`real_and_lazy_data` for an introduction to lazy data. See :doc:`../further_topics/which_regridder_to_use` for a more in depth overview of the different regridders. @@ -194,46 +194,6 @@ For example, to mask values that lie beyond the range of the original data: [-- 494.44451904296875 588.888916015625 683.333251953125 777.77783203125 872.2222290039062 966.666748046875 1061.111083984375 1155.555419921875 --] - -.. 
_caching_an_interpolator: - -Caching an Interpolator -^^^^^^^^^^^^^^^^^^^^^^^ - -If you need to interpolate a cube on multiple sets of sample points you can -'cache' an interpolator to be used for each of these interpolations. This can -shorten the execution time of your code as the most computationally -intensive part of an interpolation is setting up the interpolator. - -To cache an interpolator you must set up an interpolator scheme and call the -scheme's interpolator method. The interpolator method takes as arguments: - -#. a cube to be interpolated, and -#. an iterable of coordinate names or coordinate instances of the coordinates that are to be interpolated over. - -For example: - - >>> air_temp = iris.load_cube(iris.sample_data_path('air_temp.pp')) - >>> interpolator = iris.analysis.Nearest().interpolator(air_temp, ['latitude', 'longitude']) - -When this cached interpolator is called you must pass it an iterable of sample points -that have the same form as the iterable of coordinates passed to the constructor. -So, to use the cached interpolator defined above: - - >>> latitudes = np.linspace(48, 60, 13) - >>> longitudes = np.linspace(-11, 2, 14) - >>> for lat, lon in zip(latitudes, longitudes): - ... result = interpolator([lat, lon]) - -In each case ``result`` will be a cube interpolated from the ``air_temp`` cube we -passed to interpolator. - -Note that you must specify the required extrapolation mode when setting up the cached interpolator. -For example:: - - >>> interpolator = iris.analysis.Nearest(extrapolation_mode='nan').interpolator(cube, coords) - - .. _regridding: Regridding @@ -417,12 +377,12 @@ In each case ``result`` will be the input cube regridded to the grid defined by the target grid cube (in this case ``rotated_psl``) that we used to define the cached regridder. -Regridding Lazy Data -^^^^^^^^^^^^^^^^^^^^ +Interpolating and Regridding Lazy Data +-------------------------------------- -If you are working with large cubes, especially when you are regridding to a -high resolution target grid, you may run out of memory when trying to -regrid a cube. When this happens, make sure the input cube has lazy data +If you are working with large cubes, you may run out of memory when trying to +interpolate or regrid a cube. For instance, this might happen when regridding to a +high resolution target grid. When this happens, make sure the input cube has lazy data >>> air_temp = iris.load_cube(iris.sample_data_path('A1B_north_america.nc')) >>> air_temp @@ -430,11 +390,11 @@ regrid a cube. When this happens, make sure the input cube has lazy data >>> air_temp.has_lazy_data() True -and the regridding scheme supports lazy data. All regridding schemes described -here support lazy data. If you still run out of memory even while using lazy -data, inspect the -`chunks `__ -: +and the interpolation or regridding scheme supports lazy data. All interpolation and +regridding schemes described here with exception of :class:`iris.analysis.PointInCell` +(point-in-cell regridder) and :class:`iris.analysis.UnstructuredNearest` (nearest-neighbour +regridder) support lazy data. If you still run out of memory even while using lazy data, +inspect the `chunks `__ : >>> air_temp.lazy_data().chunks ((240,), (37,), (49,)) @@ -455,6 +415,6 @@ dimension, to regrid it in 8 chunks of 30 timesteps at a time: Assuming that Dask is configured such that it processes only a few chunks of the data array at a time, this will further reduce memory use. 
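A compact sketch of the lazy interpolation behaviour documented above, using
the same sample file as the rest of this section (the sample latitude values
are illustrative only)::

    import iris
    from iris.analysis import Linear

    air_temp = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))
    assert air_temp.has_lazy_data()

    result = air_temp.interpolate([('latitude', [40.0, 45.0, 50.0])], Linear())
    # With a lazy source cube the interpolated cube is also lazy; nothing is
    # computed until the data is actually requested.
    assert result.has_lazy_data()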
-Note that chunking in the horizontal dimensions is not supported by the -regridding schemes. Chunks in these dimensions will automatically be combined +Note that chunking in the horizontal dimensions is not supported by the interpolation +and regridding schemes. Chunks in these dimensions will automatically be combined before regridding. diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 3e0a81f591..60ff06e7b2 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -62,6 +62,13 @@ This document explains the changes made to Iris for this release #. N/A +#. `@fnattino`_ enabled lazy cube interpolation using the linear and + nearest-neighbour interpolators (:class:`iris.analysis.Linear` and + :class:`iris.analysis.Nearest`). Note that this implementation removes + performance benefits linked to caching an interpolator object. While this does + not break previously suggested code (instantiating and re-using an interpolator + object remains possible), this is no longer an advertised feature. (:pull:`6084`) + 🔥 Deprecations =============== diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 708f141de3..93d82fa575 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -2687,9 +2687,7 @@ def interpolator(self, cube, coords): the given coordinates. Typically you should use :meth:`iris.cube.Cube.interpolate` for - interpolating a cube. There are, however, some situations when - constructing your own interpolator is preferable. These are detailed - in the :ref:`user guide `. + interpolating a cube. Parameters ---------- @@ -2890,9 +2888,7 @@ def interpolator(self, cube, coords): by the dimensions of the specified coordinates. Typically you should use :meth:`iris.cube.Cube.interpolate` for - interpolating a cube. There are, however, some situations when - constructing your own interpolator is preferable. These are detailed - in the :ref:`user guide `. + interpolating a cube. Parameters ---------- diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 6904c5ae4f..67b10727ec 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -12,6 +12,7 @@ from numpy.lib.stride_tricks import as_strided import numpy.ma as ma +from iris._lazy_data import map_complete_blocks from iris.coords import AuxCoord, DimCoord import iris.util @@ -163,6 +164,15 @@ def snapshot_grid(cube): return x.copy(), y.copy() +def _interpolated_dtype(dtype, method): + """Determine the minimum base dtype required by the underlying interpolator.""" + if method == "nearest": + result = dtype + else: + result = np.result_type(_DEFAULT_DTYPE, dtype) + return result + + class RectilinearInterpolator: """Provide support for performing nearest-neighbour or linear interpolation. @@ -200,13 +210,8 @@ def __init__(self, src_cube, coords, method, extrapolation_mode): set to NaN. """ - # Trigger any deferred loading of the source cube's data and snapshot - # its state to ensure that the interpolator is impervious to external - # changes to the original source cube. The data is loaded to prevent - # the snapshot having lazy data, avoiding the potential for the - # same data to be loaded again and again. - if src_cube.has_lazy_data(): - src_cube.data + # Snapshot the cube state to ensure that the interpolator is impervious + # to external changes to the original source cube. self._src_cube = src_cube.copy() # Coordinates defining the dimensions to be interpolated. 
self._src_coords = [self._src_cube.coord(coord) for coord in coords] @@ -277,17 +282,27 @@ def _account_for_inverted(self, data): data = data[tuple(dim_slices)] return data - def _interpolate(self, data, interp_points): + @staticmethod + def _interpolate( + data, + src_points, + interp_points, + interp_shape, + method="linear", + extrapolation_mode="nanmask", + ): """Interpolate a data array over N dimensions. - Create and cache the underlying interpolator instance before invoking - it to perform interpolation over the data at the given coordinate point - values. + Create the interpolator instance before invoking it to perform + interpolation over the data at the given coordinate point values. Parameters ---------- data : ndarray A data array, to be interpolated in its first 'N' dimensions. + src_points : + The point values defining the dimensions to be interpolated. + (len(src_points) should be N). interp_points : ndarray An array of interpolation coordinate values. Its shape is (..., N) where N is the number of interpolation @@ -296,44 +311,53 @@ def _interpolate(self, data, interp_points): coordinate, which is mapped to the i'th data dimension. The other (leading) dimensions index over the different required sample points. + interp_shape : + The shape of the interpolated array in its first 'N' dimensions + (len(interp_shape) should be N). + method : str + Interpolation method (see :class:`iris.analysis._interpolation.RectilinearInterpolator`). + extrapolation_mode : str + Extrapolation mode (see :class:`iris.analysis._interpolation.RectilinearInterpolator`). Returns ------- :class:`np.ndarray`. - Its shape is "points_shape + extra_shape", + Its shape is "interp_shape + extra_shape", where "extra_shape" is the remaining non-interpolated dimensions of - the data array (i.e. 'data.shape[N:]'), and "points_shape" is the - leading dimensions of interp_points, - (i.e. 'interp_points.shape[:-1]'). - + the data array (i.e. 'data.shape[N:]'). """ from iris.analysis._scipy_interpolate import _RegularGridInterpolator - dtype = self._interpolated_dtype(data.dtype) + dtype = _interpolated_dtype(data.dtype, method) if data.dtype != dtype: # Perform dtype promotion. data = data.astype(dtype) - mode = EXTRAPOLATION_MODES[self._mode] - if self._interpolator is None: - # Cache the interpolator instance. - # NB. The constructor of the _RegularGridInterpolator class does - # some unnecessary checks on the fill_value parameter, - # so we set it afterwards instead. Sneaky. ;-) - self._interpolator = _RegularGridInterpolator( - self._src_points, - data, - method=self.method, - bounds_error=mode.bounds_error, - fill_value=None, - ) - else: - self._interpolator.values = data + # Determine the shape of the interpolated result. + ndims_interp = len(interp_shape) + extra_shape = data.shape[ndims_interp:] + final_shape = [*interp_shape, *extra_shape] + + mode = EXTRAPOLATION_MODES[extrapolation_mode] + _data = np.ma.getdata(data) + # NB. The constructor of the _RegularGridInterpolator class does + # some unnecessary checks on the fill_value parameter, + # so we set it afterwards instead. Sneaky. ;-) + interpolator = _RegularGridInterpolator( + src_points, + _data, + method=method, + bounds_error=mode.bounds_error, + fill_value=None, + ) + interpolator.fill_value = mode.fill_value + result = interpolator(interp_points) - # We may be re-using a cached interpolator, so ensure the fill - # value is set appropriately for extrapolating data values. 
- self._interpolator.fill_value = mode.fill_value - result = self._interpolator(interp_points) + # The interpolated result has now shape "points_shape + extra_shape" + # where "points_shape" is the leading dimension of "interp_points" + # (i.e. 'interp_points.shape[:-1]'). We reshape it to match the shape + # of the interpolated dimensions. + result = result.reshape(final_shape) if result.dtype != data.dtype: # Cast the data dtype to be as expected. Note that, the dtype @@ -346,13 +370,11 @@ def _interpolate(self, data, interp_points): # `data` is not a masked array. src_mask = np.ma.getmaskarray(data) # Switch the extrapolation to work with mask values. - self._interpolator.fill_value = mode.mask_fill_value - self._interpolator.values = src_mask - mask_fraction = self._interpolator(interp_points) + interpolator.fill_value = mode.mask_fill_value + interpolator.values = src_mask + mask_fraction = interpolator(interp_points) new_mask = mask_fraction > 0 - if ma.isMaskedArray(data) or np.any(new_mask): - result = np.ma.MaskedArray(result, new_mask) - + result = np.ma.MaskedArray(result, new_mask) return result def _resample_coord(self, sample_points, coord, coord_dims): @@ -458,14 +480,6 @@ def _validate(self): msg = "Cannot interpolate over the non-monotonic coordinate {}." raise ValueError(msg.format(coord.name())) - def _interpolated_dtype(self, dtype): - """Determine the minimum base dtype required by the underlying interpolator.""" - if self._method == "nearest": - result = dtype - else: - result = np.result_type(_DEFAULT_DTYPE, dtype) - return result - def _points(self, sample_points, data, data_dims=None): """Interpolate at the specified points. @@ -490,9 +504,8 @@ def _points(self, sample_points, data, data_dims=None): Returns ------- - :class:`~numpy.ndarray` or :class:`~numpy.ma.MaskedArray` - An :class:`~numpy.ndarray` or :class:`~numpy.ma.MaskedArray` - instance of the interpolated data. + ndarray + The interpolated data array. """ dims = list(range(self._src_cube.ndim)) @@ -530,19 +543,15 @@ def _points(self, sample_points, data, data_dims=None): _, src_order = zip(*sorted(dmap.items(), key=operator.itemgetter(0))) # Prepare the sample points for interpolation and calculate the - # shape of the interpolated result. + # shape of the interpolated dimensions. interp_points = [] interp_shape = [] for index, points in enumerate(sample_points): - dtype = self._interpolated_dtype(self._src_points[index].dtype) + dtype = _interpolated_dtype(self._src_points[index].dtype, self._method) points = np.array(points, dtype=dtype, ndmin=1) interp_points.append(points) interp_shape.append(points.size) - interp_shape.extend( - length for dim, length in enumerate(data.shape) if dim not in di - ) - # Convert the interpolation points into a cross-product array # with shape (n_cross_points, n_dims) interp_points = np.asarray([pts for pts in product(*interp_points)]) @@ -554,9 +563,21 @@ def _points(self, sample_points, data, data_dims=None): # Transpose data in preparation for interpolation. data = np.transpose(data, interp_order) - # Interpolate and reshape the data ... - result = self._interpolate(data, interp_points) - result = result.reshape(interp_shape) + # Interpolate the data, ensuring the interpolated dimensions + # are not chunked. 
+ dims_not_chunked = [dmap[d] for d in di] + result = map_complete_blocks( + data, + self._interpolate, + dims=dims_not_chunked, + out_sizes=interp_shape, + dtype=_interpolated_dtype(data.dtype, self._method), + src_points=self._src_points, + interp_points=interp_points, + interp_shape=interp_shape, + method=self._method, + extrapolation_mode=self._mode, + ) if src_order != dims: # Restore the interpolated result to the original @@ -568,6 +589,9 @@ def _points(self, sample_points, data, data_dims=None): def __call__(self, sample_points, collapse_scalar=True): """Construct a cube from the specified orthogonal interpolation points. + If the source cube has lazy data, the returned cube will also + have lazy data. + Parameters ---------- sample_points : @@ -585,6 +609,14 @@ def __call__(self, sample_points, collapse_scalar=True): of the cube will be the number of original cube dimensions minus the number of scalar coordinates, if collapse_scalar is True. + Notes + ----- + .. note:: + + If the source cube has lazy data, + `chunks `__ + in the interpolated dimensions will be combined before regridding. + """ if len(sample_points) != len(self._src_coords): msg = "Expected sample points for {} coordinates, got {}." @@ -592,7 +624,7 @@ def __call__(self, sample_points, collapse_scalar=True): sample_points = _canonical_sample_points(self._src_coords, sample_points) - data = self._src_cube.data + data = self._src_cube.core_data() # Interpolate the cube payload. interpolated_data = self._points(sample_points, data) diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py index ed6e230840..1513738b7d 100644 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py @@ -499,24 +499,37 @@ def test_orthogonal_cube_squash(self): self.assertEqual(result_cube, non_collapsed_cube[0, ...]) +class Test___call___real_data(ThreeDimCube): + def test_src_cube_data_loaded(self): + # If the source cube has real data when the interpolator is + # instantiated, then the interpolated result should also have + # real data. + self.assertFalse(self.cube.has_lazy_data()) + + # Perform interpolation and check the data is real. + interpolator = RectilinearInterpolator( + self.cube, ["latitude"], LINEAR, EXTRAPOLATE + ) + res = interpolator([[1.5]]) + self.assertFalse(res.has_lazy_data()) + + class Test___call___lazy_data(ThreeDimCube): def test_src_cube_data_loaded(self): - # RectilinearInterpolator operates using a snapshot of the source cube. # If the source cube has lazy data when the interpolator is - # instantiated we want to make sure the source cube's data is - # loaded as a consequence of interpolation to avoid the risk - # of loading it again and again. + # instantiated, then the interpolated result should also have + # lazy data. # Modify self.cube to have lazy data. self.cube.data = as_lazy_data(self.data) self.assertTrue(self.cube.has_lazy_data()) - # Perform interpolation and check the data has been loaded. + # Perform interpolation and check the data is lazy.. 
interpolator = RectilinearInterpolator( self.cube, ["latitude"], LINEAR, EXTRAPOLATE ) - interpolator([[1.5]]) - self.assertFalse(self.cube.has_lazy_data()) + res = interpolator([[1.5]]) + self.assertTrue(res.has_lazy_data()) class Test___call___time(tests.IrisTest): From 6c0ce9ca86ec9f49804588449e2348cb35582ecd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 15:54:14 +0000 Subject: [PATCH 59/74] Bump scitools/workflows from 2025.01.3 to 2025.01.4 (#6295) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2025.01.3 to 2025.01.4. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2025.01.3...2025.01.4) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Patrick Peglar --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 573c6f55c6..b996d0dcb6 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.3 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.4 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 49fc27e72a..beb5de27ac 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.3 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.4 secrets: inherit From 99108cd6602cdeb14710ba4f11ce12a9529a79c3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:44:40 +0000 Subject: [PATCH 60/74] Bump scitools/workflows from 2025.01.4 to 2025.01.5 (#6300) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2025.01.4 to 2025.01.5. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2025.01.4...2025.01.5) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index b996d0dcb6..366c9524af 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.4 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.5 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index beb5de27ac..a76b7f8dc0 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.4 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.5 secrets: inherit From 2839fca591a507d695c562f9f6f24f40cbeae4b4 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 16:31:49 +0000 Subject: [PATCH 61/74] Updated environment lockfiles (#6296) Co-authored-by: Lockfile bot Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- requirements/locks/py312-linux-64.lock | 36 +++++++++++++------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 8abecaca86..245df76911 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -57,7 +57,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#60 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.48.0-hee588c1_0.conda#84bd1c9a82b455e7a2f390375fb38f90 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.48.0-hee588c1_1.conda#3fa05c528d8a1e2a67bbf1e36f22d3bc https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -102,7 +102,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df3 https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.14-h59595ed_0.conda#2c97dd90633508b422c11bd3018206ab https://conda.anaconda.org/conda-forge/linux-64/python-3.12.8-h9e4cc4f_1_cpython.conda#7fd2fd79436d9b473812f14e86746844 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.48.0-h9eae976_0.conda#2b3a22991c20ed6ea2ed65d3407a91f4 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.48.0-h9eae976_1.conda#0ca48fd3357c877f21ea4440fe18e2b7 
https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b @@ -128,12 +128,12 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c +https://conda.anaconda.org/conda-forge/noarch/filelock-3.17.0-pyhd8ed1ab_0.conda#7f402b4a1007ee355bc50ce4d24d4a57 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h178313f_1.conda#fb986e1c089021979dc79606af78ef8f https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 -https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 -https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a +https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 @@ -155,17 +155,17 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 -https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py312h66e93f0_0.conda#55d5742a696d7da1c1262e99b6217ceb +https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py312h178313f_1.conda#349635694b4df27336bc15a49e9220e9 https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py312h66e93f0_0.conda#add2c79595fa8a9b6d653d7e4e2cf05f https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda#232fb4577b6687b2d503ef8e254270c9 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.1-pyhd8ed1ab_0.conda#392c91c42edd569a7ec99ed8648f597a https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f0_1.conda#39aed2afe4d0cf76ab3d6b09eecdbea7 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h178313f_2.conda#cf2485f39740de96e2a7f2bb18ed2fee https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9a31268f80dd46548da27e0a7bac9d68 https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 @@ -198,16 +198,16 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.cond https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py312h178313f_0.conda#df113f58bdfc79c98f5e07b6bd3eb4c2 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda#6198b134b1c08173f33653896974d477 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py312h178313f_1.conda#bc18c46eda4c2b29431981998507e723 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.5-py312h178313f_0.conda#3b2d05c3be2f99fe5c40929685a6ed2c https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda#f4b39bf00c69f56ac01e020ebfac066c https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6f5c62b_11.conda#68fc66282364981589ef36868b1a7c78 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 @@ -222,15 +222,15 @@ 
https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.1-pyhd8ed1ab_0.conda#de06336c9833cffd2a4bd6f27c4cf8ea https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h66e93f0_0.conda#91df2efaa08730416bec2a4502309275 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h178313f_1.conda#6822c49f294d4355f19d314b8b6063d8 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py312h178313f_0.conda#8219afa093757bbe07b9825eb1973ed9 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.2.0-h4bba637_0.conda#9e38e86167e8b1ea0094747d12944ce4 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda#c70dd0718dbccdcc6d5828de3e71399d +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.6.1-hd8ed1ab_0.conda#7f46575a91b1307441abc235d01cab66 https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py312h7e784f5_0.conda#6159cab400b61f38579a7692be5e630a +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.2-py312h72c5963_0.conda#7e984cb31e0366d1812096b41b361425 https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda#d3894405f05b2c0f351d5de3ae26fa9c https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f @@ -245,14 +245,14 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.con https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda#f5fbba0394ee45e9a64a73c2a994126a https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.6-pyhd8ed1ab_0.conda#d751c3b4a973ed15b57be90d68c716d1 https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-hba53ac1_1.conda#f5e75fe79d446bf4975b41d375314605 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.6.0-hac27bb2_3.conda#10ee0153cd8ddc6bd2ec147e7fd56280 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 
https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.1-h861ebed_0.conda#59e660508a4de9401543303d5f576aeb https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0.conda#3f62987017ad18e9e7dadce9899de9ef https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.1-py312h180e4f1_0.conda#355bcf0f629159c9bd10a406cd8b6c3a @@ -280,7 +280,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenvino-tensorflow-lite-fron https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda#c27a17a8c54c0d35cf83bbc0de8f7f77 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda#7e41ca6012a6bf609539aec0dfee93f7 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.1.0-pyha770c72_0.conda#5353f5eb201a9415b12385e35ed1148d https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-egl_py312hc001bbe_13.conda#559a8d091b4e8806520f7f2f797c66de From 98159005bad9a90192c2756fa71f83aa3b4d28c5 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Thu, 30 Jan 2025 17:45:20 +0100 Subject: [PATCH 62/74] Use cube chunks for weights in aggregations with smart weights (#6288) * Use cube chunks for smart weights * Fix docstring for area_weights * Remove unused code * Added whatsnew --- docs/src/whatsnew/latest.rst | 7 ++++- lib/iris/analysis/__init__.py | 5 ++++ lib/iris/analysis/cartography.py | 5 ++-- lib/iris/tests/test_analysis.py | 47 ++++++++++++++++++++++---------- 4 files changed, 45 insertions(+), 19 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 60ff06e7b2..e377805866 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -105,6 +105,11 @@ This document explains the changes made to Iris for this release :doc:`/developers_guide/release_do_nothing` to be more thorough and apply lessons learned from recent releases. (:pull:`6062`) +#. `@schlunma`_ made lazy [smart + weights](https://github.com/SciTools/iris/pull/5084) used for cube + aggregations have the same chunks as their parent cube if broadcasting is + necessary. (:issue:`6285`, :pull:`6288`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, @@ -115,4 +120,4 @@ This document explains the changes made to Iris for this release .. _@stefsmeets: https://github.com/stefsmeets .. 
comment - Whatsnew resources in alphabetical order: \ No newline at end of file + Whatsnew resources in alphabetical order: diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 93d82fa575..9a29636bbd 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1198,10 +1198,15 @@ def __init__(self, weights, cube): dim_metadata = cube._dimensional_metadata(weights) derived_array = dim_metadata._core_values() if dim_metadata.shape != cube.shape: + if isinstance(derived_array, da.Array): + chunks = cube.lazy_data().chunks + else: + chunks = None derived_array = iris.util.broadcast_to_shape( derived_array, cube.shape, dim_metadata.cube_dims(cube), + chunks=chunks, ) derived_units = dim_metadata.units diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 308ba9289b..d055266d98 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -405,9 +405,8 @@ def area_weights(cube, normalize=False, compute=True, chunks=None): If False, return a lazy dask array. If True, return a numpy array. chunks : tuple, optional If compute is False and a value is provided, then the result will use - these chunks instead of the same chunks as the cube data. The values - provided here will only be used along dimensions that are not latitude - or longitude. + these chunks. The values provided here will only be used along + dimensions that are not latitude or longitude. Returns ------- diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index 9e0bf76d34..919746e164 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -1192,8 +1192,6 @@ def test_rotate_1d(self): @_shared_utils.skip_data class TestAreaWeights: - # Note: chunks is simply ignored for non-lazy data - @pytest.mark.parametrize("chunks", [None, (2, 3)]) @pytest.fixture(autouse=True) def _setup(self, request): self.request = request @@ -1726,15 +1724,24 @@ def test_weights_in_kwargs(self): assert kwargs == {"test_kwarg": "test", "weights": "ignored"} +@pytest.mark.parametrize("lazy", [True, False]) class TestWeights: @pytest.fixture(autouse=True) - def _setup_test_data(self): - self.array_lib = np - self.target_type = np.ndarray + def _setup_test_data(self, lazy): + if lazy: + self.array_lib = da + self.target_type = da.Array + self.chunks = ((2,), (1, 1, 1)) + else: + self.array_lib = np + self.target_type = np.ndarray + self.chunks = None self.create_test_data() def create_test_data(self): self.data = self.array_lib.arange(6).reshape(2, 3) + if self.chunks is not None: + self.data = self.data.rechunk(self.chunks) self.lat = iris.coords.DimCoord( self.array_lib.array([0, 1]), standard_name="latitude", @@ -1770,6 +1777,8 @@ def test_init_with_array(self): assert isinstance(weights.units, cf_units.Unit) assert weights.array is self.data assert weights.units == "1" + if self.chunks is not None: + assert weights.array.chunks == self.chunks def test_init_with_cube(self): weights = _Weights(self.cube, self.cube) @@ -1777,6 +1786,8 @@ def test_init_with_cube(self): assert isinstance(weights.units, cf_units.Unit) assert weights.array is self.data assert weights.units == "K" + if self.chunks is not None: + assert weights.array.chunks == self.chunks def test_init_with_str_dim_coord(self): weights = _Weights("latitude", self.cube) @@ -1792,6 +1803,8 @@ def test_init_with_str_aux_coord(self): assert isinstance(weights.units, cf_units.Unit) _shared_utils.assert_array_equal(weights.array, [[3, 3, 3], [4, 
4, 4]]) assert weights.units == "s" + if self.chunks is not None: + assert weights.array.chunks == self.chunks def test_init_with_str_ancillary_variable(self): weights = _Weights("ancvar", self.cube) @@ -1799,6 +1812,10 @@ def test_init_with_str_ancillary_variable(self): assert isinstance(weights.units, cf_units.Unit) _shared_utils.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) assert weights.units == "kg" + # Chunks of existing array dimensions passed to broadcast_to_shape are + # ignored + if self.chunks is not None: + assert weights.array.chunks == ((2,), (3,)) def test_init_with_str_cell_measure(self): weights = _Weights("cell_area", self.cube) @@ -1806,6 +1823,8 @@ def test_init_with_str_cell_measure(self): assert isinstance(weights.units, cf_units.Unit) _shared_utils.assert_array_equal(weights.array, self.data) assert weights.units == "m2" + if self.chunks is not None: + assert weights.array.chunks == self.chunks def test_init_with_dim_coord(self): weights = _Weights(self.lat, self.cube) @@ -1821,6 +1840,8 @@ def test_init_with_aux_coord(self): assert isinstance(weights.units, cf_units.Unit) _shared_utils.assert_array_equal(weights.array, [[3, 3, 3], [4, 4, 4]]) assert weights.units == "s" + if self.chunks is not None: + assert weights.array.chunks == self.chunks def test_init_with_ancillary_variable(self): weights = _Weights(self.ancillary_variable, self.cube) @@ -1828,6 +1849,10 @@ def test_init_with_ancillary_variable(self): assert isinstance(weights.units, cf_units.Unit) _shared_utils.assert_array_equal(weights.array, [[5, 6, 7], [5, 6, 7]]) assert weights.units == "kg" + # Chunks of existing array dimensions passed to broadcast_to_shape are + # ignored + if self.chunks is not None: + assert weights.array.chunks == ((2,), (3,)) def test_init_with_cell_measure(self): weights = _Weights(self.cell_measure, self.cube) @@ -1835,6 +1860,8 @@ def test_init_with_cell_measure(self): assert isinstance(weights.units, cf_units.Unit) _shared_utils.assert_array_equal(weights.array, self.data) assert weights.units == "m2" + if self.chunks is not None: + assert weights.array.chunks == self.chunks def test_init_with_list(self): list_in = [0, 1, 2] @@ -1845,16 +1872,6 @@ def test_init_with_list(self): assert weights.units == "1" -class TestWeightsLazy(TestWeights): - """Repeat tests from ``TestWeights`` with lazy arrays.""" - - @pytest.fixture(autouse=True) - def _setup_test_data(self): - self.array_lib = da - self.target_type = da.core.Array - self.create_test_data() - - def test__Groupby_repr(): groupby_coord = iris.coords.AuxCoord([2000, 2000], var_name="year") shared_coord = iris.coords.DimCoord( From a883b8013164dbd257b739859883dfc5ab0475cf Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 10:06:10 +0000 Subject: [PATCH 63/74] [pre-commit.ci] pre-commit autoupdate (#6298) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.9.2 → v0.9.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.9.2...v0.9.4) - [github.com/codespell-project/codespell: v2.3.0 → v2.4.1](https://github.com/codespell-project/codespell/compare/v2.3.0...v2.4.1) * Odd fixes for link + spelling failures. 
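Relating to the smart-weights change shown above, where ``_Weights`` now passes
the parent cube's chunks to ``iris.util.broadcast_to_shape``: a minimal sketch
of the resulting chunking, with made-up shapes and values::

    import dask.array as da
    from iris.util import broadcast_to_shape

    cube_chunks = ((2,), (1, 1, 1))        # chunks of the parent cube's lazy data
    weights_1d = da.asarray([3.0, 4.0])    # lazy weights spanning cube dimension 0

    weights_2d = broadcast_to_shape(weights_1d, (2, 3), (0,), chunks=cube_chunks)
    # Broadcast dimensions follow the cube's chunking, while dimensions already
    # present in the weights keep their own chunks (here dimension 0).
    print(weights_2d.chunks)               # ((2,), (1, 1, 1))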
--------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Patrick Peglar --- .pre-commit-config.yaml | 4 ++-- docs/src/community/iris_xarray.rst | 2 +- .../userguide/plotting_examples/cube_brewer_cite_contourf.py | 2 +- pyproject.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b7c063c8c7..e006f09613 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.9.2" + rev: "v0.9.4" hooks: - id: ruff types: [file, python] @@ -38,7 +38,7 @@ repos: types: [file, python] - repo: https://github.com/codespell-project/codespell - rev: "v2.3.0" + rev: "v2.4.1" hooks: - id: codespell types_or: [asciidoc, python, markdown, rst] diff --git a/docs/src/community/iris_xarray.rst b/docs/src/community/iris_xarray.rst index 71585d8b9f..34c9aa9466 100644 --- a/docs/src/community/iris_xarray.rst +++ b/docs/src/community/iris_xarray.rst @@ -141,7 +141,7 @@ output is not fully CF compliant (as-per `the cf checker `_, + providing `support for non-standard calendars `_, however date units may not always be saved correctly. * CF-style coordinate bounds variables are not fully understood. The CF approach where bounds variables do not usually define their units or standard_names can cause diff --git a/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py index 4e28510e43..ab171bb6f2 100644 --- a/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py +++ b/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py @@ -1,4 +1,4 @@ -"""Addind a citation for a plot using iris.plot.citation().""" +"""Adding a citation for a plot using iris.plot.citation().""" import matplotlib.pyplot as plt diff --git a/pyproject.toml b/pyproject.toml index 74e514ad20..b8d2c96359 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -155,7 +155,7 @@ exclude_lines = [ ] [tool.codespell] -ignore-words-list = "alpha-numeric,assertIn,degreee,discontiguities,lazyness,meaned,nin" +ignore-words-list = "alpha-numeric,assertIn,degreee,discontiguities,lazyness,meaned,nin,re-use,re-uses,re-using,re-used,anc,abl" skip = "./CODE_OF_CONDUCT.md,_build,*.css,*.ipynb,*.js,*.html,*.svg,*.xml,.git,generated" [tool.check-manifest] From 9f14d2ffab6fe77bc1cf7feb3e33780321143454 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Thu, 6 Feb 2025 17:50:48 +0100 Subject: [PATCH 64/74] Improve speed of loading small NetCDF files (#6229) * Read list of variables only once * Add whatsnew * Improve whatsnew * Add benchmark for files with many cubes --- benchmarks/benchmarks/load/__init__.py | 52 ++++++++++++++++++++++++++ docs/src/whatsnew/latest.rst | 3 +- lib/iris/cube.py | 3 +- lib/iris/fileformats/cf.py | 30 +++++++-------- 4 files changed, 71 insertions(+), 17 deletions(-) diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index a4dfb40d19..5c5a62a515 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -132,6 +132,58 @@ def time_many_var_load(self) -> None: _ = load(str(self.FILE_PATH)) +class ManyCubes: + FILE_PATH = BENCHMARK_DATA / "many_cube_file.nc" + + @staticmethod + def _create_file(save_path: str) -> None: + """Run externally - everything must be self-contained.""" + import numpy as np + + from iris import save + 
from iris.coords import AuxCoord, DimCoord + from iris.cube import Cube, CubeList + + data_len = 81920 + bnds_len = 3 + data = np.arange(data_len).astype(np.float32) + bnds_data = ( + np.arange(data_len * bnds_len) + .astype(np.float32) + .reshape(data_len, bnds_len) + ) + time = DimCoord(np.array([0]), standard_name="time") + lat = AuxCoord( + data, bounds=bnds_data, standard_name="latitude", units="degrees" + ) + lon = AuxCoord( + data, bounds=bnds_data, standard_name="longitude", units="degrees" + ) + cube = Cube(data.reshape(1, -1), units="unknown") + cube.add_dim_coord(time, 0) + cube.add_aux_coord(lat, 1) + cube.add_aux_coord(lon, 1) + + n_cubes = 100 + cubes = CubeList() + for i in range(n_cubes): + cube = cube.copy() + cube.long_name = f"var_{i}" + cubes.append(cube) + save(cubes, save_path) + + def setup_cache(self) -> None: + if not REUSE_DATA or not self.FILE_PATH.is_file(): + # See :mod:`benchmarks.generate_data` docstring for full explanation. + _ = run_function_elsewhere( + self._create_file, + str(self.FILE_PATH), + ) + + def time_many_cube_load(self) -> None: + _ = load(str(self.FILE_PATH)) + + class StructuredFF: """Test structured loading of a large-ish fieldsfile. diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index e377805866..74d090a006 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -60,7 +60,8 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. N/A +#. `@bouweandela`_ made loading :class:`~iris.cube.Cube`s from small NetCDF + files faster. (:pull:`6229`) #. `@fnattino`_ enabled lazy cube interpolation using the linear and nearest-neighbour interpolators (:class:`iris.analysis.Linear` and diff --git a/lib/iris/cube.py b/lib/iris/cube.py index e84ff202b9..bb11f65440 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -924,7 +924,8 @@ def __eq__(self, other): # For equality, require both globals + locals to match exactly. # NOTE: array content works correctly, since 'locals' and 'globals' are always # iris.common.mixin.LimitedAttributeDict, which gets this right. - other = CubeAttrsDict(other) + if not isinstance(other, CubeAttrsDict): + other = CubeAttrsDict(other) result = self.locals == other.locals and self.globals == other.globals return result diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index f1efec81f6..21c2b3d122 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -1337,9 +1337,11 @@ def __init__(self, file_source, warn=False, monotonic=False): self._trim_ugrid_variable_types() self._with_ugrid = False - self._translate() - self._build_cf_groups() - self._reset() + # Read the variables in the dataset only once to reduce runtime. + variables = self._dataset.variables + self._translate(variables) + self._build_cf_groups(variables) + self._reset(variables) def __enter__(self): # Enable use as a context manager @@ -1381,16 +1383,16 @@ def filename(self): def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._filename) - def _translate(self): + def _translate(self, variables): """Classify the netCDF variables into CF-netCDF variables.""" - netcdf_variable_names = list(self._dataset.variables.keys()) + netcdf_variable_names = list(variables.keys()) # Identify all CF coordinate variables first. 
This must be done # first as, by CF convention, the definition of a CF auxiliary # coordinate variable may include a scalar CF coordinate variable, # whereas we want these two types of variables to be mutually exclusive. coords = CFCoordinateVariable.identify( - self._dataset.variables, monotonic=self._check_monotonic + variables, monotonic=self._check_monotonic ) self.cf_group.update(coords) coordinate_names = list(self.cf_group.coordinates.keys()) @@ -1403,9 +1405,7 @@ def _translate(self): if issubclass(variable_type, CFGridMappingVariable) else coordinate_names ) - self.cf_group.update( - variable_type.identify(self._dataset.variables, ignore=ignore) - ) + self.cf_group.update(variable_type.identify(variables, ignore=ignore)) # Identify global netCDF attributes. attr_dict = { @@ -1415,7 +1415,7 @@ def _translate(self): self.cf_group.global_attributes.update(attr_dict) # Identify and register all CF formula terms. - formula_terms = _CFFormulaTermsVariable.identify(self._dataset.variables) + formula_terms = _CFFormulaTermsVariable.identify(variables) for cf_var in formula_terms.values(): for cf_root, cf_term in cf_var.cf_terms_by_root.items(): @@ -1455,9 +1455,9 @@ def _translate(self): ) for name in data_variable_names: - self.cf_group[name] = CFDataVariable(name, self._dataset.variables[name]) + self.cf_group[name] = CFDataVariable(name, variables[name]) - def _build_cf_groups(self): + def _build_cf_groups(self, variables): """Build the first order relationships between CF-netCDF variables.""" def _build(cf_variable): @@ -1511,7 +1511,7 @@ def _span_check( ignore += coordinate_names match = variable_type.identify( - self._dataset.variables, + variables, ignore=ignore, target=cf_variable.cf_name, warn=False, @@ -1603,9 +1603,9 @@ def _span_check( promoted.add(cf_name) not_promoted = ignored.difference(promoted) - def _reset(self): + def _reset(self, variables): """Reset the attribute touch history of each variable.""" - for nc_var_name in self._dataset.variables.keys(): + for nc_var_name in variables.keys(): self.cf_group[nc_var_name].cf_attrs_reset() def _close(self): From 27e532f7071e343f6a1cf3d8e3741ad913b18eac Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 11:15:26 +0000 Subject: [PATCH 65/74] Updated environment lockfiles (#6279) Co-authored-by: Lockfile bot merge-commit --- requirements/locks/py312-linux-64.lock | 88 ++++++++++++++++++-------- 1 file changed, 61 insertions(+), 27 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 245df76911..d163b488c7 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -3,14 +3,16 @@ # input_hash: 989d858ad22ed9fe27cc23f25fd7ad423d1250d679d35944ae71177ccc27a44e @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.12.14-hbcca054_0.conda#720523eb0d6a9b0f6120c16b2aa4e7de +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2025.1.31-hbcca054_0.conda#19f3a56f68d2fd06c516076bff482c52 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda#dbcace4706afdfb7eb891f7b37d07c04 https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.0.6-h005c6e1_0.conda#9464e297fa2bf08030c65a54342b48c3 +https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.40-hd8ed1ab_0.conda#6cc41cdfd792cdca2e32dc0ace89b712 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 @@ -19,44 +21,51 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.13-hb9d3cd8_0.conda#ae1370588aa6a5157c34c73e9bbb36a0 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d -https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.4-hb9d3cd8_0.conda#42d5b6a0f30d3c10cd88cb8584fda1cb https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_2.conda#04b34b9a40cdc48cfdab261ab176ff74 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 +https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-h166bdaf_1.tar.bz2#a089d06164afd2d511347d3f87214e0b +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab +https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda#fcd2016d1d299f654f81021e27496818 https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 -https://conda.anaconda.org/conda-forge/linux-64/level-zero-1.20.2-h84d6215_0.conda#0dd565af73afda8201b6043ba15d0240 +https://conda.anaconda.org/conda-forge/linux-64/level-zero-1.20.4-h84d6215_0.conda#bc9c9b49a021a2c5704af32e5c95f88e https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda#efab66b82ec976930b96d62a976de8e7 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.51-hbd13f7d_1.conda#168cc19c031482f83b23c4eebbb94e26 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-h4ab18f5_0.conda#601bfb4b3c6f0b844443bb81a56651e0 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f 
https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.46-h943b412_0.conda#adcf7bacff219488e29cfa95a2abd8f7 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.48.0-hee588c1_1.conda#3fa05c528d8a1e2a67bbf1e36f22d3bc https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 @@ -65,6 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.32.9-hc50e24c_0.conda#c7f302fd11eeb0987a6a5e1f3aed6a21 https://conda.anaconda.org/conda-forge/linux-64/opencl-headers-2024.10.24-h5888daf_0.conda#3ba02cce423fdac1a8582bd6bb189359 https://conda.anaconda.org/conda-forge/linux-64/openh264-2.5.0-hf92e6e3_0.conda#d1b18a73fc3cfd0de9c7e786d2febb8f https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 @@ -88,7 +98,11 @@ https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b1893 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa +https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda#4fab9799da9571266d05ca5503330655 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.71-h39aace5_0.conda#dd19e4e3043f6948bd7454b946ee0983 https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-lib-1.11.0-hb9d3cd8_2.conda#e55712ff40a054134d51b89afca57dbc +https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda#9aba7960731e6b4547b3a52f812ed801 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 @@ -107,11 +121,11 @@ https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda https://conda.anaconda.org/conda-forge/linux-64/wayland-1.23.1-h3e06ad9_0.conda#0a732427643ae5e0486a727927791da1 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.11-h4f16b4b_0.conda#b6eb6d0cb323179af168df8fe16fb0a1 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.4-pyhd8ed1ab_1.conda#296b403617bafa89df4971567af79013 https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a -https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a +https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda#2cc3f588512f04f3a0c64b4e9bedc02d https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda#b0b867af6fc74b2a0aa206da29c0f3cf @@ -131,7 +145,7 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a https://conda.anaconda.org/conda-forge/noarch/filelock-3.17.0-pyhd8ed1ab_0.conda#7f402b4a1007ee355bc50ce4d24d4a57 https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h178313f_1.conda#fb986e1c089021979dc79606af78ef8f -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2025.2.0-pyhd8ed1ab_0.conda#d9ea16b71920b03beafc17fcca16df90 https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 @@ -139,10 +153,13 @@ https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.b https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.1-pyhd8ed1ab_1.conda#ae376af0a29183e98a95508ed6944664 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.8-py312h84d6215_0.conda#6713467dc95509683bfa3aca08524e8a -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a +https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda#1091193789bb830127ed067a9e01ac57 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-28_h59b9bed_openblas.conda#73e2a99fdeb8531d50168987378fda8a +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h4637d8d_4.conda#d4529f4dff3057982a7617c7ac58fde3 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_1.conda#37d1af619d999ee8f1f73cf5a06f4e2f https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-257.2-h3dc2cb9_0.conda#40c12fdd396297db83f789722027f5ed 
https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 @@ -170,7 +187,7 @@ https://conda.anaconda.org/conda-forge/noarch/scooby-0.10.0-pyhd8ed1ab_1.conda#9 https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e -https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda#60ce69f73f3e75b21f1c27b1b471320c @@ -180,8 +197,8 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py312h66e93f0_0.conda#e417822cb989e80a0d2b1b576fdd1657 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-16.0.0-py312h66e93f0_0.conda#617f5d608ff8c28ad546e5d9671cbb95 -https://conda.anaconda.org/conda-forge/noarch/wayland-protocols-1.37-hd8ed1ab_0.conda#73ec79a77d31eb7e4a3276cd246b776c https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.43-hb9d3cd8_0.conda#f725c7425d6d7c15e31f3b99a88ea02f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e @@ -191,47 +208,57 @@ https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.2-pyhd8ed1ab_0.conda#1a3981115a398535dbe3f6d5faae3d36 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 +https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d 
https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py312h178313f_0.conda#df113f58bdfc79c98f5e07b6bd3eb4c2 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py312h66e93f0_0.conda#6198b134b1c08173f33653896974d477 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.5-py312h178313f_0.conda#3b2d05c3be2f99fe5c40929685a6ed2c +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.56.0-py312h178313f_0.conda#2f8a66f2f9eb931cdde040d02c6ab54c https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda#c7f243bbaea97cd6ea1edd693270100e +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.82.2-h4833e2c_1.conda#e2e44caeaef6e4b107577aa46c95eb12 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 +https://conda.anaconda.org/conda-forge/noarch/h2-4.2.0-pyhd8ed1ab_0.conda#b4754fb1bdcb70c8fd54f918301582c6 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda#f4b39bf00c69f56ac01e020ebfac066c https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-28_he106b2a_openblas.conda#4e20a1c00b4e8a984aac0f6cce59e3ac https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h6f5c62b_11.conda#68fc66282364981589ef36868b1a7c78 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_h0d58e46_1001.conda#804ca9e91bcaea0824a341d55b1684f2 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-28_h7ac8fdf_openblas.conda#069f40bfbf1dc55c83ddb07fc6a6ef8d +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.8.0-hc4a0caf_0.conda#f1656760dbf05f47f962bfdc59fc3416 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_2.conda#04e691b9fadd93a8a9fad87a81d4fd8f +https://conda.anaconda.org/conda-forge/noarch/pip-25.0-pyh8b19718_0.conda#c2548760a02ed818f92dd0d8c81b55b4 
https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.1-pyhd8ed1ab_0.conda#de06336c9833cffd2a4bd6f27c4cf8ea +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py312h178313f_1.conda#6822c49f294d4355f19d314b8b6063d8 -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.11-py312h178313f_0.conda#8219afa093757bbe07b9825eb1973ed9 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.11.12-py312h178313f_0.conda#9f96d8b6fb9bab11e46c12132283b5b1 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda#373374a3ed20141090504031dc7b693e https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.2.0-h4bba637_0.conda#9e38e86167e8b1ea0094747d12944ce4 https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.6.1-hd8ed1ab_0.conda#7f46575a91b1307441abc235d01cab66 https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.3-h59595ed_0.conda#ee48bf17cc83a00f59ca1494d5646869 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 https://conda.anaconda.org/conda-forge/linux-64/libva-2.22.0-h8a09558_1.conda#139262125a3eac8ff6eef898598745a3 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.2-py312h72c5963_0.conda#7e984cb31e0366d1812096b41b361425 -https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.0-pyhd8ed1ab_1.conda#9f71c0894cfc53f2bfd2703bb3dccb0d +https://conda.anaconda.org/conda-forge/noarch/pbr-6.1.1-pyhd8ed1ab_0.conda#80ef57db70bcc25593f8de5fc4fd8b14 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py312h80c1187_0.conda#d3894405f05b2c0f351d5de3ae26fa9c https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py312he630544_0.conda#427799f15b36751761941f4cbd7d780f https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 @@ -240,7 +267,9 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.co 
https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-8.1.0-pyhd8ed1ab_1.conda#ac738a7f524d1b157e53fb9734f85e0e https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 +https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py312h68727a3_0.conda#f5fbba0394ee45e9a64a73c2a994126a @@ -249,22 +278,23 @@ https://conda.anaconda.org/conda-forge/noarch/identify-2.6.6-pyhd8ed1ab_0.conda# https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/libass-0.17.3-hba53ac1_1.conda#f5e75fe79d446bf4975b41d375314605 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-2024.6.0-hac27bb2_3.conda#10ee0153cd8ddc6bd2ec147e7fd56280 +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.2-hc60ed4a_1.conda#ef1910918dd895516a769ed36b5b3a4e https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h66e93f0_1.conda#5fef67f50126f40f5966a9451661280d https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py312hf9745cd_1.conda#8bce4f6caaf8c5448c7ac86d87e26b4b https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.1-h861ebed_0.conda#59e660508a4de9401543303d5f576aeb -https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.3.13-py312hc0a28a1_2.conda#aa2e1e0ae18acbf72cc717c69b05ca9d +https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.1-py312hc0a28a1_0.conda#6a0691f8e533d92b14a6d5eee07b6964 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.8.0-py312hc0a28a1_0.conda#3f62987017ad18e9e7dadce9899de9ef https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.1-py312h180e4f1_0.conda#355bcf0f629159c9bd10a406cd8b6c3a -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h391bc85_2.conda#eb476b4975ea28ac12ff469063a71f5d +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.7-py312h391bc85_0.conda#3491bd7e78aa7407c965312c4a5a9254 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 https://conda.anaconda.org/conda-forge/noarch/wslink-2.2.2-pyhd8ed1ab_0.conda#860b3edb4bee7c76afb03435249e39c2 +https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 
https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_1.conda#d733874844f9808ed46a93362f89bc2d +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-batch-plugin-2024.6.0-h4d9b6c2_3.conda#9a3ade47ab98a071c3538246cfc138c2 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-auto-plugin-2024.6.0-h4d9b6c2_3.conda#246bbf8c6e41b5ea85b2af7c2c51bda5 https://conda.anaconda.org/conda-forge/linux-64/libopenvino-hetero-plugin-2024.6.0-h3f63f65_3.conda#0027d0eb0b43817adf23778721fc2156 @@ -281,16 +311,20 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda# https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py312hd3ec401_0.conda#c27a17a8c54c0d35cf83bbc0de8f7f77 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py312ha728dd9_101.conda#7e41ca6012a6bf609539aec0dfee93f7 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.1.0-pyha770c72_0.conda#5353f5eb201a9415b12385e35ed1148d +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-client-17.0-hb77b528_0.conda#07f45f1be1c25345faddb8db0de8039b https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.3.1-egl_py312hc001bbe_13.conda#559a8d091b4e8806520f7f2f797c66de +https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-47.0-unix_0.conda#49436a5c604f99058473d84580f0e341 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py312hf9745cd_0.conda#ea213e31805199cb7d0da457b879ceed https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a -https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_h099772d_709.conda#e25da7325ba8851b237e5a9c8dfffe32 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h021d004_3.conda#56c679bcdb8c1d824e927088725862cb https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac +https://conda.anaconda.org/conda-forge/linux-64/sdl2-2.30.10-h63c27ac_0.conda#5cecf6d327e4f8c5dfafc71b4a8556e7 +https://conda.anaconda.org/conda-forge/linux-64/ffmpeg-7.1.0-gpl_hf09ebf5_710.conda#c03df5443f8c45fe5cb11b4339577944 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.2.1-h5ae0cbf_1.conda#df7835d2c73cd1889d377cfd6694ada4 https://conda.anaconda.org/conda-forge/linux-64/vtk-io-ffmpeg-9.3.1-egl_py312h3373a60_13.conda#5c0f519bb190b29f9c7a1d5245754685 https://conda.anaconda.org/conda-forge/linux-64/vtk-9.3.1-egl_py312h3373a60_13.conda#65fa8fc7f02e1a50b924da2b2a1dacde https://conda.anaconda.org/conda-forge/noarch/pyvista-0.44.2-pyhd8ed1ab_1.conda#575a2593fbeda1212e1502f1d585c81c From 
990291c4344af690df477f814d5aedf4ed975690 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 11:35:20 +0000 Subject: [PATCH 66/74] Bump scitools/workflows from 2025.01.5 to 2025.02.0 (#6306) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2025.01.5 to 2025.02.0. - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2025.01.5...2025.02.0) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 366c9524af..a9165595bb 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.01.5 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.02.0 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index a76b7f8dc0..699dba7751 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.01.5 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.02.0 secrets: inherit From 37ae6f7ca2f357ce2c6e9e46f7590c110fa3b614 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 12 Feb 2025 14:52:42 +0000 Subject: [PATCH 67/74] [pre-commit.ci] pre-commit autoupdate (#6310) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.9.4 → v0.9.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.9.4...v0.9.6) - [github.com/pre-commit/mirrors-mypy: v1.14.1 → v1.15.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.14.1...v1.15.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e006f09613..6e4d98903f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.9.4" + rev: "v0.9.6" hooks: - id: ruff types: [file, python] @@ -63,7 +63,7 @@ repos: types: [file, python] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.14.1' + rev: 'v1.15.0' hooks: - id: mypy additional_dependencies: From 6b9ecec2b5058ddfc1c943fc83fe8e94b396d030 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Feb 2025 09:36:42 +0000 Subject: [PATCH 68/74] Bump scitools/workflows from 2025.02.0 to 2025.02.1 (#6313) Bumps [scitools/workflows](https://github.com/scitools/workflows) from 2025.02.0 to 2025.02.1. 
- [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2025.02.0...2025.02.1) --- updated-dependencies: - dependency-name: scitools/workflows dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index a9165595bb..5f567f7835 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.02.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.02.1 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 699dba7751..7a8272e202 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.02.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.02.1 secrets: inherit From c8334f225fb5ce5119fd071fa588cf1e0af169db Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Fri, 14 Feb 2025 15:31:21 +0100 Subject: [PATCH 69/74] Cache Dask arrays created from `NetCDFDataProxy`s to speed up loading files with multiple variables (#6252) * Cache Dask arrays to speed up loading files with multiple variables * Add benchmark for files with many cubes * Add whatsnew * Add test * Add license header * Use a global to set the cache size * Update whatsnew --- docs/src/whatsnew/latest.rst | 4 +- lib/iris/_lazy_data.py | 56 +++++++++++++++++-- .../tests/unit/lazy_data/test_as_lazy_data.py | 2 +- .../tests/unit/lazy_data/test_lrucache.py | 25 +++++++++ 4 files changed, 80 insertions(+), 7 deletions(-) create mode 100644 lib/iris/tests/unit/lazy_data/test_lrucache.py diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 74d090a006..68737d86cc 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -60,8 +60,8 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. `@bouweandela`_ made loading :class:`~iris.cube.Cube`s from small NetCDF - files faster. (:pull:`6229`) +#. `@bouweandela`_ made loading :class:`~iris.cube.Cube`s from NetCDF files + faster. (:pull:`6229` and :pull:`6252`) #. 
`@fnattino`_ enabled lazy cube interpolation using the linear and nearest-neighbour interpolators (:class:`iris.analysis.Linear` and diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index a3dfa1edb4..7546aaea04 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -19,6 +19,9 @@ import numpy as np import numpy.ma as ma +MAX_CACHE_SIZE = 100 +"""Maximum number of Dask arrays to cache.""" + def non_lazy(func): """Turn a lazy function into a function that returns a result immediately.""" @@ -202,6 +205,7 @@ def _optimum_chunksize_internals( dim = working[0] working = working[1:] result.append(dim) + result = tuple(result) return result @@ -227,6 +231,33 @@ def _optimum_chunksize( ) +class LRUCache: + def __init__(self, maxsize: int) -> None: + self._cache: dict = {} + self.maxsize = maxsize + + def __getitem__(self, key): + value = self._cache.pop(key) + self._cache[key] = value + return value + + def __setitem__(self, key, value): + self._cache[key] = value + if len(self._cache) > self.maxsize: + self._cache.pop(next(iter(self._cache))) + + def __contains__(self, key): + return key in self._cache + + def __repr__(self): + return ( + f"<{self.__class__.__name__} maxsize={self.maxsize} cache={self._cache!r} >" + ) + + +CACHE = LRUCache(MAX_CACHE_SIZE) + + def as_lazy_data(data, chunks=None, asarray=False, meta=None, dims_fixed=None): """Convert the input array `data` to a :class:`dask.array.Array`. @@ -264,6 +295,8 @@ def as_lazy_data(data, chunks=None, asarray=False, meta=None, dims_fixed=None): but reduced by a factor if that exceeds the dask default chunksize. """ + from iris.fileformats.netcdf._thread_safe_nc import NetCDFDataProxy + if isinstance(data, ma.core.MaskedConstant): data = ma.masked_array(data.data, mask=data.mask) @@ -277,7 +310,7 @@ def as_lazy_data(data, chunks=None, asarray=False, meta=None, dims_fixed=None): if chunks is None: # No existing chunks : Make a chunk the shape of the entire input array # (but we will subdivide it if too big). - chunks = list(data.shape) + chunks = tuple(data.shape) # Adjust chunk size for better dask performance, # NOTE: but only if no shape dimension is zero, so that we can handle the @@ -291,9 +324,24 @@ def as_lazy_data(data, chunks=None, asarray=False, meta=None, dims_fixed=None): dims_fixed=dims_fixed, ) - if not is_lazy_data(data): - data = da.from_array(data, chunks=chunks, asarray=asarray, meta=meta) - return data + # Define a cache key for caching arrays created from NetCDFDataProxy objects. + # Creating new Dask arrays is relatively slow, therefore caching is beneficial + # if many cubes in the same file share coordinate arrays. 
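# ---------------------------------------------------------------------------
# Editorial sketch, not part of this patch: the expected behaviour of the
# LRUCache defined above -- an insertion-ordered mapping of bounded size that
# drops its oldest entry on overflow, mirroring the unit test added later in
# this patch.  Assumes an Iris build with this change applied, so that
# LRUCache is importable from iris._lazy_data.
from iris._lazy_data import LRUCache

_sketch_cache = LRUCache(2)
_sketch_cache["a"] = 1
_sketch_cache["b"] = 2
_sketch_cache["c"] = 3  # exceeds maxsize, so the oldest key ("a") is evicted
assert "a" not in _sketch_cache
assert _sketch_cache["b"] == 2 and _sketch_cache["c"] == 3
# ---------------------------------------------------------------------------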
+ if isinstance(data, NetCDFDataProxy): + key = (repr(data), chunks, asarray, meta.dtype, type(meta)) + else: + key = None + + if is_lazy_data(data): + result = data + elif key in CACHE: + result = CACHE[key].copy() + else: + result = da.from_array(data, chunks=chunks, asarray=asarray, meta=meta) + if key is not None: + CACHE[key] = result.copy() + + return result def _co_realise_lazy_arrays(arrays): diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py index 821370ce6c..a0c13208ee 100644 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py @@ -161,7 +161,7 @@ def test_default_chunks_limiting(self, mocker): as_lazy_data(data) assert limitcall_patch.call_args_list == [ mock.call( - list(test_shape), + tuple(test_shape), shape=test_shape, dtype=np.dtype("f4"), dims_fixed=None, diff --git a/lib/iris/tests/unit/lazy_data/test_lrucache.py b/lib/iris/tests/unit/lazy_data/test_lrucache.py new file mode 100644 index 0000000000..2a8abb0cde --- /dev/null +++ b/lib/iris/tests/unit/lazy_data/test_lrucache.py @@ -0,0 +1,25 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Test function :func:`iris._lazy data.LRUCache`.""" + +from iris._lazy_data import LRUCache + + +def test_lrucache(): + cache = LRUCache(2) + + cache["a"] = 1 + + assert "a" in cache + assert cache["a"] == 1 + + cache["b"] = 2 + cache["c"] = 3 + + assert "a" not in cache + assert "b" in cache + assert "c" in cache + + assert str(cache) == "" From 799bdfd70a425a4520ac89b51326824654537120 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 18 Feb 2025 11:47:36 +0000 Subject: [PATCH 70/74] [pre-commit.ci] pre-commit autoupdate (#6315) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/PyCQA/flake8: 7.1.1 → 7.1.2](https://github.com/PyCQA/flake8/compare/7.1.1...7.1.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6e4d98903f..e3ea8e0624 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -45,7 +45,7 @@ repos: additional_dependencies: [tomli] - repo: https://github.com/PyCQA/flake8 - rev: 7.1.1 + rev: 7.1.2 hooks: - id: flake8 types: [file, python] From 4dd050bd5dfd22f2bfe2bdd6cea7577899feb7ee Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Wed, 19 Feb 2025 11:59:19 +0000 Subject: [PATCH 71/74] replaced reference from build to python build (#6324) --- docs/src/developers_guide/release.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index c7ce230204..df6b2a44df 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -223,7 +223,7 @@ To do this perform the following steps. Create a conda environment with the appropriate conda packages to build the source distribution (``sdist``) and pure Python wheel (``bdist_wheel``):: - > conda create -n iris-pypi -c conda-forge --yes build twine + > conda create -n iris-pypi -c conda-forge --yes python-build twine > . 
activate iris-pypi Checkout the appropriate Iris ```` tag from the appropriate ````. From 50d9ed6bfea4bf2509385ff9f1755d78f53f3b3e Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Fri, 25 Oct 2024 15:44:30 +0100 Subject: [PATCH 72/74] WIP --- lib/iris/tests/unit/fileformats/cf/test_CFReader.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 7f37eb9f24..b29db7fae4 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -4,6 +4,10 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.cf.CFReader` class.""" +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests # isort:skip + from unittest import mock import numpy as np From 9eb29de01e6fd05909ee379e55e1a62ea56a5052 Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Fri, 25 Oct 2024 17:21:02 +0100 Subject: [PATCH 73/74] rebase --- lib/iris/fileformats/cf.py | 3 +- .../unit/fileformats/cf/test_CFReader.py | 99 +++++++++++++++++++ 2 files changed, 101 insertions(+), 1 deletion(-) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 21c2b3d122..e7e7586a1f 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -16,7 +16,6 @@ from abc import ABCMeta, abstractmethod from collections.abc import Iterable, MutableMapping -import contextlib import os import re from typing import ClassVar, Optional @@ -1448,11 +1447,13 @@ def _translate(self, variables): new_var.bounds = cf_var.bounds new_var.add_formula_term(cf_root, cf_term) self.cf_group[cf_name] = new_var + self.cf_group[cf_name].add_formula_term(cf_root, cf_term) # Determine the CF data variables. data_variable_names = ( set(netcdf_variable_names) - self.cf_group.non_data_variable_names ) + print("name") for name in data_variable_names: self.cf_group[name] = CFDataVariable(name, variables[name]) diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index b29db7fae4..90178d0484 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -147,6 +147,7 @@ def _setup(self, mocker): mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", return_value=self.dataset, +<<<<<<< HEAD ) def test_create_formula_terms(self, mocker): @@ -181,6 +182,48 @@ def test_create_formula_terms(self, mocker): assert set(group.keys()) == set(bounds) for name in bounds: assert group[name].cf_data == getattr(self, name) +======= + ): + cf_group = CFReader("dummy").cf_group + self.assertEqual(len(cf_group), len(self.variables)) + # Check there is a singular data variable. + group = cf_group.data_variables + self.assertEqual(len(group), 1) + self.assertEqual(list(group.keys()), ["temp"]) + self.assertIs(group["temp"].cf_data, self.temp) + # Check there are three coordinates. + group = cf_group.coordinates + self.assertEqual(len(group), 3) + coordinates = ["height", "lat", "lon"] + self.assertEqual(set(group.keys()), set(coordinates)) + for name in coordinates: + self.assertIs(group[name].cf_data, getattr(self, name)) + # Check there are three auxiliary coordinates. 
+ group = cf_group.auxiliary_coordinates + self.assertEqual(len(group), 3) + aux_coordinates = ["delta", "sigma", "orography"] + self.assertEqual(set(group.keys()), set(aux_coordinates)) + for name in aux_coordinates: + self.assertIs(group[name].cf_data, getattr(self, name)) + # Check there are three bounds. + group = cf_group.bounds + self.assertEqual(len(group), 3) + bounds = ["height_bnds", "delta_bnds", "sigma_bnds"] + self.assertEqual(set(group.keys()), set(bounds)) + for name in bounds: + self.assertEqual(group[name].cf_data, getattr(self, name)) + # Check the formula terms contains all expected terms + formula_terms = cf_group.formula_terms + expected_keys = ["delta", "sigma", "orography", "delta_bnds", "sigma_bnds"] + expected_group = { + k: v + for k, v in dict( + **cf_group.auxiliary_coordinates, **cf_group.bounds + ).items() + if k in expected_keys + } + self.assertEqual(set(expected_group.items()), set(formula_terms.items())) +>>>>>>> b75621090 (fix tests and example) class Test_build_cf_groups__formula_terms: @@ -243,6 +286,7 @@ def _setup(self, mocker): mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", return_value=self.dataset, +<<<<<<< HEAD ) def test_associate_formula_terms_with_data_variable(self, mocker): @@ -283,6 +327,61 @@ def test_associate_formula_terms_with_data_variable(self, mocker): assert len(aux_coord_group.bounds) == 1 assert name_bnds in aux_coord_group.bounds assert aux_coord_group[name_bnds].cf_data is getattr(self, name_bnds) +======= + ): + cf_group = CFReader("dummy").cf_group + self.assertEqual(len(cf_group), len(self.variables)) + # Check the cf-group associated with the data variable. + temp_cf_group = cf_group["temp"].cf_group + # Check the data variable is associated with eight variables. + self.assertEqual(len(temp_cf_group), 8) + # Check there are three coordinates. + group = temp_cf_group.coordinates + self.assertEqual(len(group), 3) + coordinates = ["height", "lat", "lon"] + self.assertEqual(set(group.keys()), set(coordinates)) + for name in coordinates: + self.assertIs(group[name].cf_data, getattr(self, name)) + # Check the height coordinate is bounded. + group = group["height"].cf_group + self.assertEqual(len(group.bounds), 1) + self.assertIn("height_bnds", group.bounds) + self.assertIs(group["height_bnds"].cf_data, self.height_bnds) + # Check there are five auxiliary coordinates. + group = temp_cf_group.auxiliary_coordinates + self.assertEqual(len(group), 5) + aux_coordinates = ["delta", "sigma", "orography", "x", "y"] + self.assertEqual(set(group.keys()), set(aux_coordinates)) + formula_terms = cf_group.formula_terms + for name in aux_coordinates: + self.assertIs(group[name].cf_data, getattr(self, name)) + # Check the terms by root. + for name, term in zip(aux_coordinates, ["a", "b", "orog"]): + self.assertEqual( + formula_terms[name].cf_terms_by_root, dict(height=term) + ) + # Check the bounded auxiliary coordinates. 
+ for name, name_bnds in zip( + ["delta", "sigma"], ["delta_bnds", "sigma_bnds"] + ): + aux_coord_group = group[name].cf_group + self.assertEqual(len(aux_coord_group.bounds), 1) + self.assertIn(name_bnds, aux_coord_group.bounds) + self.assertIs( + aux_coord_group[name_bnds].cf_data, + getattr(self, name_bnds), + ) + # Check the formula terms contains all expected terms + expected_keys = ["delta", "sigma", "orography", "delta_bnds", "sigma_bnds"] + expected_group = { + k: v + for k, v in dict( + **cf_group.auxiliary_coordinates, **cf_group.bounds + ).items() + if k in expected_keys + } + self.assertEqual(set(expected_group.items()), set(formula_terms.items())) +>>>>>>> b75621090 (fix tests and example) def test_promote_reference(self): cf_group = CFReader("dummy").cf_group From 656ee1c1d7f669e944d661b648479558da132a87 Mon Sep 17 00:00:00 2001 From: Henry Wright Date: Fri, 28 Feb 2025 16:47:15 +0000 Subject: [PATCH 74/74] WIP promoting the bounds to a cube --- lib/iris/__init__.py | 5 ++ lib/iris/fileformats/cf.py | 134 +++++++++++++++++++++++++------------ 2 files changed, 96 insertions(+), 43 deletions(-) diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index bc2b84709d..8451cd59be 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -149,6 +149,7 @@ def __init__( pandas_ndim=False, save_split_attrs=False, date_microseconds=False, + derived_bounds=False, ): """Container for run-time options controls. @@ -182,6 +183,8 @@ def __init__( behaviour, such as when using :class:`~iris.Constraint`, and you may need to defend against floating point precision issues where you didn't need to before. + derived_bounds : bool, default=False + When True, uses the new form for deriving bounds with the load. """ # The flag 'example_future_flag' is provided as a reference for the @@ -195,6 +198,7 @@ def __init__( self.__dict__["pandas_ndim"] = pandas_ndim self.__dict__["save_split_attrs"] = save_split_attrs self.__dict__["date_microseconds"] = date_microseconds + self.__dict__["derived_bounds"] = derived_bounds # TODO: next major release: set IrisDeprecation to subclass # DeprecationWarning instead of UserWarning. @@ -208,6 +212,7 @@ def __repr__(self): self.pandas_ndim, self.save_split_attrs, self.date_microseconds, + self.derived_bounds, ) # deprecated_options = {'example_future_flag': 'warning',} diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index e7e7586a1f..af3c0e448e 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -16,6 +16,7 @@ from abc import ABCMeta, abstractmethod from collections.abc import Iterable, MutableMapping +import contextlib import os import re from typing import ClassVar, Optional @@ -94,6 +95,8 @@ def __init__(self, name, data): #: CF-netCDF formula terms that his variable participates in. self.cf_terms_by_root = {} + self._to_be_promoted = False + self.cf_attrs_reset() @staticmethod @@ -1416,44 +1419,70 @@ def _translate(self, variables): # Identify and register all CF formula terms. formula_terms = _CFFormulaTermsVariable.identify(variables) - for cf_var in formula_terms.values(): - for cf_root, cf_term in cf_var.cf_terms_by_root.items(): - bounds_name = None - cf_root_coord = self.cf_group.coordinates.get(cf_root) - with contextlib.suppress(AttributeError): - # Copes with cf_root_coord not existing, OR not having - # `bounds` attribute. - bounds_name = cf_root_coord.bounds - if bounds_name is not None: - # This will error if more or less than 1 variable is found. 
- (bounds_var,) = [ - f - for f in formula_terms.values() - if f.cf_terms_by_root.get(bounds_name) == cf_term - ] - if bounds_var != cf_var: - cf_var.bounds = bounds_var.cf_name - new_var = CFBoundaryVariable( - bounds_var.cf_name, bounds_var.cf_data - ) - new_var.add_formula_term(bounds_name, cf_term) - self.cf_group[bounds_var.cf_name] = new_var - - if cf_root not in self.cf_group.bounds: - cf_name = cf_var.cf_name - if cf_var.cf_name not in self.cf_group: - new_var = CFAuxiliaryCoordinateVariable(cf_name, cf_var.cf_data) - if hasattr(cf_var, "bounds"): - new_var.bounds = cf_var.bounds - new_var.add_formula_term(cf_root, cf_term) - self.cf_group[cf_name] = new_var + if iris.FUTURE.derived_bounds: + # cf_var = CFFormulaTermsVariable (loops through everything that appears in formula terms) + for cf_var in formula_terms.values(): + # eg. eta:'a' | cf_root = eta and cf_term = a. cf_var.cf_terms_by_root = {'eta': 'a'} (looking at all appearances in formula terms) + for cf_root, cf_term in cf_var.cf_terms_by_root.items(): + # gets set to the bounds of the coord from cf_root_coord + bounds_name = None + # cf_root_coord = CFCoordinateVariable of the coordinate relating to the root + cf_root_coord = self.cf_group.coordinates.get(cf_root) + if cf_root_coord is None: + cf_root_coord = self.cf_group.auxiliary_coordinates.get(cf_root) + with contextlib.suppress(AttributeError): + # Copes with cf_root_coord not existing, OR not having + # `bounds` attribute. + bounds_name = cf_root_coord.bounds + + if bounds_name is not None: + try: + # This will error if more or less than 1 variable is found. + # TODO: try a try/except here or logical alternative + (bounds_var,) = [ + # loop through all formula terms and add them if they have a cf_term_by_root + # where (bounds of cf_root): cf_term (same as before) + f + for f in formula_terms.values() + if f.cf_terms_by_root.get(bounds_name) == cf_term + ] + if bounds_var != cf_var: + cf_var.bounds = bounds_var.cf_name + new_var = CFBoundaryVariable( + bounds_var.cf_name, bounds_var.cf_data + ) + new_var.add_formula_term(bounds_name, cf_term) + self.cf_group[bounds_var.cf_name] = new_var + except ValueError: + # Modify the boundary_variable set _to_be_promoted to True + self.cf_group.get(bounds_name)._to_be_promoted = True + + if cf_root not in self.cf_group.bounds: + cf_name = cf_var.cf_name + if cf_var.cf_name not in self.cf_group: + new_var = CFAuxiliaryCoordinateVariable( + cf_name, cf_var.cf_data + ) + if hasattr(cf_var, "bounds"): + new_var.bounds = cf_var.bounds + new_var.add_formula_term(cf_root, cf_term) + self.cf_group[cf_name] = new_var + else: + for cf_var in formula_terms.values(): + for cf_root, cf_term in cf_var.cf_terms_by_root.items(): + # Ignore formula terms owned by a bounds variable. + if cf_root not in self.cf_group.bounds: + cf_name = cf_var.cf_name + if cf_var.cf_name not in self.cf_group: + self.cf_group[cf_name] = CFAuxiliaryCoordinateVariable( + cf_name, cf_var.cf_data + ) self.cf_group[cf_name].add_formula_term(cf_root, cf_term) # Determine the CF data variables. data_variable_names = ( set(netcdf_variable_names) - self.cf_group.non_data_variable_names ) - print("name") for name in data_variable_names: self.cf_group[name] = CFDataVariable(name, variables[name]) @@ -1477,7 +1506,7 @@ def _span_check( """Sanity check dimensionality.""" var = self.cf_group[var_name] # No span check is necessary if variable is attached to a mesh. 
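# ---------------------------------------------------------------------------
# Editorial sketch, not part of this patch: the bounds matching performed by
# the new ``derived_bounds`` branch of ``_translate`` above, reduced to plain
# dictionaries.  The variable and term names come from the hybrid-height test
# fixture used elsewhere in this series; ``bounds_partner`` is illustrative
# only and is not Iris API.
terms_by_var = {
    "delta": {"height": "a"},
    "sigma": {"height": "b"},
    "delta_bnds": {"height_bnds": "a"},
    "sigma_bnds": {"height_bnds": "b"},
}
bounds_of = {"height": "height_bnds"}


def bounds_partner(var_name, cf_root, cf_term):
    # Return the single formula-terms variable supplying `cf_term` for the
    # bounds of `cf_root`, or None when there is not exactly one such match.
    bounds_name = bounds_of.get(cf_root)
    matches = [
        name
        for name, roots in terms_by_var.items()
        if name != var_name and roots.get(bounds_name) == cf_term
    ]
    return matches[0] if len(matches) == 1 else None


assert bounds_partner("delta", "height", "a") == "delta_bnds"
assert bounds_partner("sigma", "height", "b") == "sigma_bnds"
# ---------------------------------------------------------------------------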
- if is_mesh_var or var.spans(cf_variable): + if (is_mesh_var or var.spans(cf_variable)) and not var._to_be_promoted: cf_group[var_name] = var else: # Register the ignored variable. @@ -1521,12 +1550,13 @@ def _span_check( for cf_name in match: _span_check(cf_name) - if hasattr(cf_variable, "bounds"): - if cf_variable.bounds not in cf_group: - bounds_var = self.cf_group[cf_variable.bounds] - # TODO: warning if fails spans - if bounds_var.spans(cf_variable): - cf_group[bounds_var.cf_name] = bounds_var + if iris.FUTURE.derived_bounds: + if hasattr(cf_variable, "bounds"): + if cf_variable.bounds not in cf_group: + bounds_var = self.cf_group[cf_variable.bounds] + # TODO: warning if fails spans + if bounds_var.spans(cf_variable): + cf_group[bounds_var.cf_name] = bounds_var # Build CF data variable relationships. if isinstance(cf_variable, CFDataVariable): @@ -1570,13 +1600,30 @@ def _span_check( # may be promoted to a CFDataVariable and restrict promotion to only # those formula terms that are reference surface/phenomenon. for cf_var in self.cf_group.formula_terms.values(): - if self.cf_group[cf_var.cf_name] is CFBoundaryVariable: - continue + if iris.FUTURE.derived_bounds: + if self.cf_group[cf_var.cf_name] is CFBoundaryVariable: + continue + else: + for cf_root, cf_term in cf_var.cf_terms_by_root.items(): + cf_root_var = self.cf_group[cf_root] + if not hasattr(cf_root_var, "standard_name"): + continue + name = cf_root_var.standard_name or cf_root_var.long_name + terms = reference_terms.get(name, []) + if isinstance(terms, str) or not isinstance(terms, Iterable): + terms = [terms] + cf_var_name = cf_var.cf_name + if ( + cf_term in terms + and cf_var_name not in self.cf_group.promoted + ): + data_var = CFDataVariable(cf_var_name, cf_var.cf_data) + self.cf_group.promoted[cf_var_name] = data_var + _build(data_var) + break else: for cf_root, cf_term in cf_var.cf_terms_by_root.items(): cf_root_var = self.cf_group[cf_root] - if not hasattr(cf_root_var, "standard_name"): - continue name = cf_root_var.standard_name or cf_root_var.long_name terms = reference_terms.get(name, []) if isinstance(terms, str) or not isinstance(terms, Iterable): @@ -1587,6 +1634,7 @@ def _span_check( self.cf_group.promoted[cf_var_name] = data_var _build(data_var) break + # Promote any ignored variables. promoted = set() not_promoted = ignored.difference(promoted)
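A minimal usage sketch for the ``derived_bounds`` opt-in introduced in the
final patch above.  The flag name and its load-time effect are taken from the
``iris.Future`` docstring in that patch; the netCDF file name below is a
placeholder, and it is assumed that ``iris.FUTURE.context`` accepts the new
flag in the same way as the existing ones::

    import iris

    # Opt in to the new formula-terms bounds handling for a single load.
    with iris.FUTURE.context(derived_bounds=True):
        cubes = iris.load("hybrid_height.nc")  # placeholder path

    # Alternatively, enable it for the whole session.
    iris.FUTURE.derived_bounds = True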