diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 5f567f7835..da8d8823d8 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.02.1 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2025.02.2 diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index 7a8272e202..1ea3f79cc8 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.02.1 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2025.02.2 secrets: inherit diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index c7ce230204..df6b2a44df 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -223,7 +223,7 @@ To do this perform the following steps. Create a conda environment with the appropriate conda packages to build the source distribution (``sdist``) and pure Python wheel (``bdist_wheel``):: - > conda create -n iris-pypi -c conda-forge --yes build twine + > conda create -n iris-pypi -c conda-forge --yes python-build twine > . activate iris-pypi Checkout the appropriate Iris ```` tag from the appropriate ````. diff --git a/docs/src/further_topics/controlling_merge.rst b/docs/src/further_topics/controlling_merge.rst index 85fe46e745..8868306d10 100644 --- a/docs/src/further_topics/controlling_merge.rst +++ b/docs/src/further_topics/controlling_merge.rst @@ -13,9 +13,10 @@ demonstration, we will revert back to this legacy loading behaviour as follows: >>> iris.LOAD_POLICY.set("legacy") .. 
note:: - The default settings for :data:`iris.LOAD_POLICY` effectively implements some version of the following demonstration - automatically upon loading. It may still be worth being aware of how to handle this manually if an even finer degree - of control is required. + Since Iris v3.11, the default settings for :data:`iris.LOAD_POLICY` effectively + implements some version of the following demonstration **automatically** upon + loading. It may still be worth being aware of how to handle this manually, if an + even finer degree of control is required. How to Merge Cubes When Coordinates Differ ------------------------------------------ diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index bc2b84709d..a5f630359d 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -87,18 +87,33 @@ def callback(cube, field, filename): """ -from collections.abc import Iterable import contextlib import glob import importlib -import itertools import os.path import threading -from typing import Callable, Literal, Mapping +from typing import Callable, Literal import iris._constraints import iris.config import iris.io +from iris.io import save +from iris.loading import LOAD_POLICY as _LOAD_POLICY +from iris.loading import ( + CombineOptions, + LoadPolicy, + load, + load_cube, + load_cubes, + load_raw, +) + +# NOTE: we make an independent local 'LOAD_POLICY' definition here, just so that we +# can ensure an entry for it in our API documentation page. + +#: A control object containing the current file loading strategy options. 
+LOAD_POLICY = _LOAD_POLICY + from ._deprecation import IrisDeprecation, warn_deprecated @@ -117,6 +132,7 @@ def callback(cube, field, filename): # Restrict the names imported when using "from iris import *" __all__ = [ "AttributeConstraint", + "CombineOptions", "Constraint", "FUTURE", "Future", @@ -283,517 +299,6 @@ def context(self, **kwargs): _update(site_configuration) -def _generate_cubes(uris, callback, constraints): - """Return a generator of cubes given the URIs and a callback.""" - if isinstance(uris, str) or not isinstance(uris, Iterable): - # Make a string, or other single item, into an iterable. - uris = [uris] - - # Group collections of uris by their iris handler - # Create list of tuples relating schemes to part names - uri_tuples = sorted(iris.io.decode_uri(uri) for uri in uris) - - for scheme, groups in itertools.groupby(uri_tuples, key=lambda x: x[0]): - # Call each scheme handler with the appropriate URIs - if scheme == "file": - part_names = [x[1] for x in groups] - for cube in iris.io.load_files(part_names, callback, constraints): - yield cube - elif scheme in ["http", "https"]: - urls = [":".join(x) for x in groups] - for cube in iris.io.load_http(urls, callback): - yield cube - elif scheme == "data": - data_objects = [x[1] for x in groups] - for cube in iris.io.load_data_objects(data_objects, callback): - yield cube - else: - raise ValueError("Iris cannot handle the URI scheme: %s" % scheme) - - -def _load_collection(uris, constraints=None, callback=None): - from iris.cube import _CubeFilterCollection - from iris.fileformats.rules import _MULTIREF_DETECTION - - try: - # This routine is called once per iris load operation. 
- # Control of the "multiple refs" handling is implicit in this routine - # NOTE: detection of multiple reference fields, and it's enabling of post-load - # concatenation, is triggered **per-load, not per-cube** - # This behaves unexpectefly for "iris.load_cubes" : a post-concatenation is - # triggered for all cubes or none, not per-cube (i.e. per constraint). - _MULTIREF_DETECTION.found_multiple_refs = False - - cubes = _generate_cubes(uris, callback, constraints) - result = _CubeFilterCollection.from_cubes(cubes, constraints) - except EOFError as e: - raise iris.exceptions.TranslationError( - "The file appears empty or incomplete: {!r}".format(str(e)) - ) - return result - - -class LoadPolicy(threading.local): - """A container for loading strategy options. - - Controls merge/concatenate usage during loading. - - Also controls the detection and handling of cases where a hybrid coordinate - uses multiple reference fields : for example, a UM file which contains a series of - fields describing time-varying orography. - - Options can be set directly, or via :meth:`~iris.LoadPolicy.set`, or changed for - the scope of a code block with :meth:`~iris.LoadPolicy.context`. - - .. note :: - - The default behaviour will "fix" loading for cases like the one just described. - However this is not strictly backwards-compatible. If this causes problems, - you can force identical loading behaviour to earlier Iris versions with - ``LOAD_POLICY.set("legacy")`` or equivalent. - - .. testsetup:: - - from iris import LOAD_POLICY - - Notes - ----- - The individual configurable options are : - - * ``support_multiple_references`` = True / False - When enabled, the presence of multiple aux-factory reference cubes, which merge - to define a extra dimension, will add that dimension to the loaded cubes. - This is essential for correct support of time-dependent hybrid coordinates (i.e. - aux factories) when loading from fields-based data (e.g. PP or GRIB). 
- For example (notably) time-dependent orography in UM data on hybrid-heights. - - In addition, when such multiple references are detected, an extra concatenate - step is added to the 'merge_concat_sequence' (see below), if none is already - configured there. - - * ``merge_concat_sequence`` = "m" / "c" / "cm" / "mc" - Specifies whether to merge, or concatenate, or both in either order. - This is the "combine" operation which is applied to loaded data. - - * ``repeat_until_unchanged`` = True / False - When enabled, the configured "combine" operation will be repeated until the - result is stable (no more cubes are combined). - - Several common sets of options are provided in :data:`~iris.LOAD_POLICY.SETTINGS` : - - * ``"legacy"`` - Produces results identical to Iris versions < 3.11, i.e. before the varying - hybrid references were supported. - - * ``"default"`` - As "legacy" except that ``support_multiple_references=True``. This differs - from "legacy" only when multiple mergeable reference fields are encountered, - in which case incoming cubes are extended into the extra dimension, and a - concatenate step is added. - - * ``"recommended"`` - Enables multiple reference handling, and applies a merge step followed by - a concatenate step. - - * ``"comprehensive"`` - Like "recommended", but will also *repeat* the merge+concatenate steps until no - further change is produced. - - .. note :: - - The 'comprehensive' policy makes a maximum effort to reduce the number of - cubes to a minimum. However, it still cannot combine cubes with a mixture - of matching dimension and scalar coordinates. This may be supported at - some later date, but for now is not possible without specific user actions. - - .. Note :: - - See also : :ref:`controlling_merge`. 
- - Examples - -------- - >>> LOAD_POLICY.set("legacy") - >>> print(LOAD_POLICY) - LoadPolicy(support_multiple_references=False, merge_concat_sequence='m', repeat_until_unchanged=False) - >>> LOAD_POLICY.support_multiple_references = True - >>> print(LOAD_POLICY) - LoadPolicy(support_multiple_references=True, merge_concat_sequence='m', repeat_until_unchanged=False) - >>> LOAD_POLICY.set(merge_concat_sequence="cm") - >>> print(LOAD_POLICY) - LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) - >>> with LOAD_POLICY.context("comprehensive"): - ... print(LOAD_POLICY) - LoadPolicy(support_multiple_references=True, merge_concat_sequence='mc', repeat_until_unchanged=True) - >>> print(LOAD_POLICY) - LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) - - """ - - # Useful constants - OPTION_KEYS = ( - "support_multiple_references", - "merge_concat_sequence", - "repeat_until_unchanged", - ) - _OPTIONS_ALLOWED_VALUES = { - "support_multiple_references": (False, True), - "merge_concat_sequence": ("", "m", "c", "mc", "cm"), - "repeat_until_unchanged": (False, True), - } - SETTINGS = { - "legacy": dict( - support_multiple_references=False, - merge_concat_sequence="m", - repeat_until_unchanged=False, - ), - "default": dict( - support_multiple_references=True, - merge_concat_sequence="m", - repeat_until_unchanged=False, - ), - "recommended": dict( - support_multiple_references=True, - merge_concat_sequence="mc", - repeat_until_unchanged=False, - ), - "comprehensive": dict( - support_multiple_references=True, - merge_concat_sequence="mc", - repeat_until_unchanged=True, - ), - } - - def __init__(self, options: str | dict | None = None, **kwargs): - """Create loading strategy control object.""" - self.set("default") - self.set(options, **kwargs) - - def __setattr__(self, key, value): - if key not in self.OPTION_KEYS: - raise KeyError(f"LoadPolicy object has no property '{key}'.") - 
- allowed_values = self._OPTIONS_ALLOWED_VALUES[key] - if value not in allowed_values: - msg = ( - f"{value!r} is not a valid setting for LoadPolicy.{key} : " - f"must be one of '{allowed_values}'." - ) - raise ValueError(msg) - - self.__dict__[key] = value - - def set(self, options: str | dict | None = None, **kwargs): - """Set new options. - - Parameters - ---------- - * options : str or dict, optional - A dictionary of options values, or the name of one of the - :data:`~iris.LoadPolicy.SETTINGS` standard option sets, - e.g. "legacy" or "comprehensive". - * kwargs : dict - Individual option settings, from :data:`~iris.LoadPolicy.OPTION_KEYS`. - - Note - ---- - Keyword arguments are applied after the 'options' arg, and - so will take precedence. - - """ - if options is None: - options = {} - elif isinstance(options, str) and options in self.SETTINGS: - options = self.SETTINGS[options] - elif not isinstance(options, Mapping): - msg = ( - f"Invalid arg options={options!r} : " - f"must be a dict, or one of {tuple(self.SETTINGS.keys())}" - ) - raise TypeError(msg) - - # Override any options with keywords - options.update(**kwargs) - bad_keys = [key for key in options if key not in self.OPTION_KEYS] - if bad_keys: - msg = f"Unknown options {bad_keys} : valid options are {self.OPTION_KEYS}." - raise ValueError(msg) - - # Implement all options by changing own content. - for key, value in options.items(): - setattr(self, key, value) - - def settings(self): - """Return an options dict containing the current settings.""" - return {key: getattr(self, key) for key in self.OPTION_KEYS} - - def __repr__(self): - msg = f"{self.__class__.__name__}(" - msg += ", ".join(f"{key}={getattr(self, key)!r}" for key in self.OPTION_KEYS) - msg += ")" - return msg - - @contextlib.contextmanager - def context(self, settings=None, **kwargs): - """Return a context manager applying given options. 
- - Parameters - ---------- - settings : str or dict - Options dictionary or name, as for :meth:`~LoadPolicy.set`. - kwargs : dict - Option values, as for :meth:`~LoadPolicy.set`. - - Examples - -------- - .. testsetup:: - - import iris - from iris import LOAD_POLICY, sample_data_path - - >>> path = sample_data_path("time_varying_hybrid_height", "*.pp") - >>> with LOAD_POLICY.context("legacy"): - ... cubes = iris.load(path, "x_wind") - >>> print(cubes) - 0: x_wind / (m s-1) (time: 2; model_level_number: 5; latitude: 144; longitude: 192) - 1: x_wind / (m s-1) (time: 12; model_level_number: 5; latitude: 144; longitude: 192) - 2: x_wind / (m s-1) (model_level_number: 5; latitude: 144; longitude: 192) - >>> - >>> with LOAD_POLICY.context("recommended"): - ... cubes = iris.load(path, "x_wind") - >>> print(cubes) - 0: x_wind / (m s-1) (model_level_number: 5; time: 15; latitude: 144; longitude: 192) - """ - # Save the current state - saved_settings = self.settings() - - # Apply the new options and execute the context - try: - self.set(settings, **kwargs) - yield - finally: - # Re-establish the former state - self.set(saved_settings) - - -#: A control object containing the current file loading options. -LOAD_POLICY = LoadPolicy() - - -def _combine_cubes(cubes, options, merge_require_unique): - """Combine cubes as for load, according to "loading policy" options. - - Applies :meth:`~iris.cube.CubeList.merge`/:meth:`~iris.cube.CubeList.concatenate` - steps to the given cubes, as determined by the 'settings'. - - Parameters - ---------- - cubes : list of :class:`~iris.cube.Cube` - A list of cubes to combine. - options : dict - Settings, as described for :meth:`iris.LOAD_POLICY.set`. - Defaults to current :meth:`iris.LOAD_POLICY.settings`. - merge_require_unique : bool - Value for the 'unique' keyword in any merge operations. - - Returns - ------- - :class:`~iris.cube.CubeList` - - .. 
Note:: - The ``support_multiple_references`` keyword/property has no effect on the - :func:`_combine_cubes` operation : it only takes effect during a load operation. - - Notes - ----- - TODO: make this public API in future. - At that point, change the API to support (options=None, **kwargs) + add testing of - those modes (notably arg type = None / str / dict). - - """ - from iris.cube import CubeList - - if not isinstance(cubes, CubeList): - cubes = CubeList(cubes) - - while True: - n_original_cubes = len(cubes) - sequence = options["merge_concat_sequence"] - - if sequence[0] == "c": - # concat if it comes first - cubes = cubes.concatenate() - if "m" in sequence: - # merge if requested - cubes = cubes.merge(unique=merge_require_unique) - if sequence[-1] == "c": - # concat if it comes last - cubes = cubes.concatenate() - - # Repeat if requested, *and* this step reduced the number of cubes - if not options["repeat_until_unchanged"] or len(cubes) >= n_original_cubes: - break - - return cubes - - -def _combine_load_cubes(cubes, merge_require_unique=False): - # A special version to call _combine_cubes while also implementing the - # _MULTIREF_DETECTION behaviour - options = LOAD_POLICY.settings() - if ( - options["support_multiple_references"] - and "c" not in options["merge_concat_sequence"] - ): - # Add a concatenate to implement the "multiref triggers concatenate" mechanism - from iris.fileformats.rules import _MULTIREF_DETECTION - - if _MULTIREF_DETECTION.found_multiple_refs: - options["merge_concat_sequence"] += "c" - - return _combine_cubes(cubes, options, merge_require_unique=merge_require_unique) - - -def load(uris, constraints=None, callback=None): - """Load any number of Cubes for each constraint. - - For a full description of the arguments, please see the module - documentation for :mod:`iris`. - - Parameters - ---------- - uris : str or :class:`pathlib.PurePath` - One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. 
- If supplying a URL, only OPeNDAP Data Sources are supported. - constraints : optional - One or more constraints. - callback : optional - A modifier/filter function. - - Returns - ------- - :class:`iris.cube.CubeList` - An :class:`iris.cube.CubeList`. Note that there is no inherent order - to this :class:`iris.cube.CubeList` and it should be treated as if it - were random. - - """ - cubes = _load_collection(uris, constraints, callback).combined().cubes() - return cubes - - -def load_cube(uris, constraint=None, callback=None): - """Load a single cube. - - For a full description of the arguments, please see the module - documentation for :mod:`iris`. - - Parameters - ---------- - uris : - One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. - If supplying a URL, only OPeNDAP Data Sources are supported. - constraints : optional - A constraint. - callback : optional - A modifier/filter function. - - Returns - ------- - :class:`iris.cube.Cube` - - """ - constraints = iris._constraints.list_of_constraints(constraint) - if len(constraints) != 1: - raise ValueError("only a single constraint is allowed") - - cubes = _load_collection(uris, constraints, callback).combined(unique=False).cubes() - - try: - # NOTE: this call currently retained to preserve the legacy exceptions - # TODO: replace with simple testing to duplicate the relevant error cases - cube = cubes.merge_cube() - except iris.exceptions.MergeError as e: - raise iris.exceptions.ConstraintMismatchError(str(e)) - except ValueError: - raise iris.exceptions.ConstraintMismatchError("no cubes found") - - return cube - - -def load_cubes(uris, constraints=None, callback=None): - """Load exactly one Cube for each constraint. - - For a full description of the arguments, please see the module - documentation for :mod:`iris`. - - Parameters - ---------- - uris : - One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. - If supplying a URL, only OPeNDAP Data Sources are supported. 
- constraints : optional - One or more constraints. - callback : optional - A modifier/filter function. - - Returns - ------- - :class:`iris.cube.CubeList` - An :class:`iris.cube.CubeList`. Note that there is no inherent order - to this :class:`iris.cube.CubeList` and it should be treated as if it - were random. - - """ - # Merge the incoming cubes - collection = _load_collection(uris, constraints, callback).combined() - - # Make sure we have exactly one merged cube per constraint - bad_pairs = [pair for pair in collection.pairs if len(pair) != 1] - if bad_pairs: - fmt = " {} -> {} cubes" - bits = [fmt.format(pair.constraint, len(pair)) for pair in bad_pairs] - msg = "\n" + "\n".join(bits) - raise iris.exceptions.ConstraintMismatchError(msg) - - return collection.cubes() - - -def load_raw(uris, constraints=None, callback=None): - """Load non-merged cubes. - - This function is provided for those occasions where the automatic - combination of cubes into higher-dimensional cubes is undesirable. - However, it is intended as a tool of last resort! If you experience - a problem with the automatic combination process then please raise - an issue with the Iris developers. - - For a full description of the arguments, please see the module - documentation for :mod:`iris`. - - Parameters - ---------- - uris : - One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. - If supplying a URL, only OPeNDAP Data Sources are supported. - constraints : optional - One or more constraints. - callback : optional - A modifier/filter function. - - Returns - ------- - :class:`iris.cube.CubeList` - - """ - from iris.fileformats.um._fast_load import _raw_structured_loading - - with _raw_structured_loading(): - return _load_collection(uris, constraints, callback).cubes() - - -save = iris.io.save - - def sample_data_path(*path_to_join): """Given the sample data resource, returns the full path to the file. 
diff --git a/lib/iris/_combine/__init__.py b/lib/iris/_combine/__init__.py new file mode 100644 index 0000000000..a8ae3bcf8e --- /dev/null +++ b/lib/iris/_combine/__init__.py @@ -0,0 +1,192 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Generalised mechanism for combining cubes into larger ones. + +Integrates merge and concatenate with the cube-equalisation options and the promotion of +hybrid reference dimensions on loading. + +This is effectively a generalised "combine cubes" operation, but it is not (yet) +publicly available. +""" + +import threading +from typing import Mapping + + +class CombineOptions(threading.local): + """A container for cube combination options. + + Controls for generalised merge/concatenate options. These are used as controls for + both the :func:`iris.util.combine_cubes` utility method and the core Iris loading + functions : see also :data:`iris.loading.LoadPolicy`. + + It specifies a number of possible operations which may be applied to a list of + cubes, in a definite sequence, all of which tend to combine cubes into a smaller + number of larger or higher-dimensional cubes. + + Notes + ----- + The individual configurable options are : + + * ``equalise_cube_kwargs`` = dict or None + If not None, this enables and provides keyword controls for a call to the + :func:`iris.util.equalise_cubes` utility. If active, this always occurs + **before** any merge/concatenate phase. + + * ``merge_concat_sequence`` = "m" / "c" / "cm" / "mc" + Specifies whether to apply :meth:`~iris.cube.CubeList.merge`, or + :meth:`~iris.cube.CubeList.concatenate` operations, or both, in either order. + + * ``merge_uses_unique`` = True / False + When True, any merge operation will error if its result contains multiple + identical cubes. Otherwise (unique=False), that is a permitted result. + + .. 
Note:: + + By default, in a normal :meth:`~iris.cube.CubeList.merge` operation on a + :class:`~iris.cube.CubeList`, unique is ``True`` unless specified otherwise. + For loading operations, however, the default is ``unique=False``, as this + is required to make sense when loading with multiple constraints. + + * ``repeat_until_unchanged`` = True / False + When enabled, the configured "combine" operation will be repeated until the + result is stable (no more cubes are combined). + + Several common sets of options are provided in :data:`~iris.LOAD_POLICY.SETTINGS` : + + * ``"legacy"`` + Produces loading behaviour identical to Iris versions < 3.11, i.e. before the + varying hybrid references were supported. + + * ``"default"`` + As "legacy" except that ``support_multiple_references=True``. This differs + from "legacy" only when multiple mergeable reference fields are encountered, + in which case incoming cubes are extended into the extra dimension, and a + concatenate step is added. + + * ``"recommended"`` + Enables multiple reference handling, *and* applies a merge step followed by + a concatenate step. + + * ``"comprehensive"`` + Like "recommended", but will also *repeat* the merge+concatenate steps until no + further change is produced. + + .. note :: + + The 'comprehensive' policy makes a maximum effort to reduce the number of + cubes to a minimum. However, it still cannot combine cubes with a mixture + of matching dimension and scalar coordinates. This may be supported at + some later date, but for now is not possible without specific user actions. + + .. Note :: + + See also : :ref:`controlling_merge`.
+ + """ + + # Useful constants + OPTION_KEYS = ( + # "support_multiple_references", + "merge_concat_sequence", + "repeat_until_unchanged", + ) + _OPTIONS_ALLOWED_VALUES = { + # "support_multiple_references": (False, True), + "merge_concat_sequence": ("", "m", "c", "mc", "cm"), + "repeat_until_unchanged": (False, True), + } + SETTING_NAMES = ("legacy", "default", "recommended", "comprehensive") + SETTINGS = { + "legacy": dict( + # support_multiple_references=False, + merge_concat_sequence="m", + repeat_until_unchanged=False, + ), + "default": dict( + # support_multiple_references=True, + merge_concat_sequence="m", + repeat_until_unchanged=False, + ), + "recommended": dict( + # support_multiple_references=True, + merge_concat_sequence="mc", + repeat_until_unchanged=False, + ), + "comprehensive": dict( + # support_multiple_references=True, + merge_concat_sequence="mc", + repeat_until_unchanged=True, + ), + } + + def __init__(self, options: str | dict | None = None, **kwargs): + """Create loading strategy control object.""" + self.set("default") + self.set(options, **kwargs) + + def __setattr__(self, key, value): + if key not in self.OPTION_KEYS: + raise KeyError(f"LoadPolicy object has no property '{key}'.") + + allowed_values = self._OPTIONS_ALLOWED_VALUES[key] + if value not in allowed_values: + msg = ( + f"{value!r} is not a valid setting for LoadPolicy.{key} : " + f"must be one of '{allowed_values}'." + ) + raise ValueError(msg) + + self.__dict__[key] = value + + def set(self, options: str | dict | None = None, **kwargs): + """Set new options. + + Parameters + ---------- + * options : str or dict, optional + A dictionary of options values, or the name of one of the + :data:`~iris.LoadPolicy.SETTINGS` standard option sets, + e.g. "legacy" or "comprehensive". + * kwargs : dict + Individual option settings, from :data:`~iris.LoadPolicy.OPTION_KEYS`. + + Note + ---- + Keyword arguments are applied after the 'options' arg, and + so will take precedence. 
+ + """ + if options is None: + options = {} + elif isinstance(options, str) and options in self.SETTINGS: + options = self.SETTINGS[options] + elif not isinstance(options, Mapping): + msg = ( + f"Invalid arg options={options!r} : " + f"must be a dict, or one of {tuple(self.SETTINGS.keys())}" + ) + raise TypeError(msg) + + # Override any options with keywords + options.update(**kwargs) + bad_keys = [key for key in options if key not in self.OPTION_KEYS] + if bad_keys: + msg = f"Unknown options {bad_keys} : valid options are {self.OPTION_KEYS}." + raise ValueError(msg) + + # Implement all options by changing own content. + for key, value in options.items(): + setattr(self, key, value) + + def settings(self): + """Return an options dict containing the current settings.""" + return {key: getattr(self, key) for key in self.OPTION_KEYS} + + def __repr__(self): + msg = f"{self.__class__.__name__}(" + msg += ", ".join(f"{key}={getattr(self, key)!r}" for key in self.OPTION_KEYS) + msg += ")" + return msg diff --git a/lib/iris/_combine/_combine_functions.py b/lib/iris/_combine/_combine_functions.py new file mode 100644 index 0000000000..3683eb8939 --- /dev/null +++ b/lib/iris/_combine/_combine_functions.py @@ -0,0 +1,81 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Private functions supporting the combine_cubes and loading operations. + +Placed in a separate submodule, purely so that iris.loading can import +iris._combine.CombineOptions without causing a circular import problem. +For legacy reasons, we are obliged to expose the iris load_xxx functions in +iris.__all__, so it must be possible to import from iris.loading into a +partially initalised iris main module. +But do we want to import from iris.cube here, to type these routine properly. 
+""" + +from typing import List + +import iris +from iris import LOAD_POLICY +from iris.cube import Cube, CubeList + + +def _combine_cubes_inner(cubes: List[Cube], options: dict) -> CubeList: + """Combine cubes, according to "combine options". + + As described for the main "iris.utils.combine_cubes". + + Parameters + ---------- + cubes : list of Cube + Cubes to combine. + + options : dict + A list of options, as described in CombineOptions. + + Returns + ------- + CubeList + """ + if isinstance(cubes, CubeList): + cubelist = cubes + else: + cubelist = CubeList(cubes) + + sequence = options["merge_concat_sequence"] + while True: + n_original_cubes = len(cubelist) + + if sequence[0] == "c": + # concat if it comes first + cubelist = cubelist.concatenate() + if "m" in sequence: + # merge if requested + # NOTE: this needs "unique=False" to make "iris.load()" work correctly. + # TODO: make configurable via options. + cubelist = cubelist.merge(unique=False) + if sequence[-1] == "c": + # concat if it comes last + cubelist = cubelist.concatenate() + + # Repeat if requested, *and* this step reduced the number of cubes + if not options["repeat_until_unchanged"] or len(cubelist) >= n_original_cubes: + break + + return cubelist + + +def _combine_load_cubes(cubes): + # A special version to call _combine_cubes_inner while also implementing the + # _MULTIREF_DETECTION behaviour + options = LOAD_POLICY.settings() + if ( + options["support_multiple_references"] + and "c" not in options["merge_concat_sequence"] + ): + # Add a concatenate to implement the "multiref triggers concatenate" mechanism + from iris.fileformats.rules import _MULTIREF_DETECTION + + if _MULTIREF_DETECTION.found_multiple_refs: + options["merge_concat_sequence"] += "c" + + return _combine_cubes_inner(cubes, options) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index bb11f65440..4357c7b498 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -60,89 +60,6 @@ XML_NAMESPACE_URI = "urn:x-iris:cubeml-0.2" -class 
_CubeFilter: - """A constraint, paired with a list of cubes matching that constraint.""" - - def __init__(self, constraint, cubes=None): - self.constraint = constraint - if cubes is None: - cubes = CubeList() - self.cubes = cubes - - def __len__(self): - return len(self.cubes) - - def add(self, cube): - """Add the appropriate (sub)cube to the list of cubes where it matches the constraint.""" - sub_cube = self.constraint.extract(cube) - if sub_cube is not None: - self.cubes.append(sub_cube) - - def combined(self, unique=False): - """Return a new :class:`_CubeFilter` by combining the list of cubes. - - Combines the list of cubes with :func:`~iris._combine_load_cubes`. - - Parameters - ---------- - unique : bool, default=False - If True, raises `iris.exceptions.DuplicateDataError` if - duplicate cubes are detected. - - """ - from iris import _combine_load_cubes - - return _CubeFilter( - self.constraint, - _combine_load_cubes(self.cubes, merge_require_unique=unique), - ) - - -class _CubeFilterCollection: - """A list of _CubeFilter instances.""" - - @staticmethod - def from_cubes(cubes, constraints=None): - """Create a new collection from an iterable of cubes, and some optional constraints.""" - constraints = iris._constraints.list_of_constraints(constraints) - pairs = [_CubeFilter(constraint) for constraint in constraints] - collection = _CubeFilterCollection(pairs) - for c in cubes: - collection.add_cube(c) - return collection - - def __init__(self, pairs): - self.pairs = pairs - - def add_cube(self, cube): - """Add the given :class:`~iris.cube.Cube` to all of the relevant constraint pairs.""" - for pair in self.pairs: - pair.add(cube) - - def cubes(self): - """Return all the cubes in this collection in a single :class:`CubeList`.""" - from iris.cube import CubeList - - result = CubeList() - for pair in self.pairs: - result.extend(pair.cubes) - return result - - def combined(self, unique=False): - """Return a new :class:`_CubeFilterCollection` by combining all the 
cube lists of this collection. - - Combines each list of cubes using :func:`~iris._combine_load_cubes`. - - Parameters - ---------- - unique : bool, default=False - If True, raises `iris.exceptions.DuplicateDataError` if - duplicate cubes are detected. - - """ - return _CubeFilterCollection([pair.combined(unique) for pair in self.pairs]) - - class CubeList(list): """All the functionality of a standard :class:`list` with added "Cube" context.""" diff --git a/lib/iris/loading.py b/lib/iris/loading.py new file mode 100644 index 0000000000..95d8e8c584 --- /dev/null +++ b/lib/iris/loading.py @@ -0,0 +1,394 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Iris file loading support.""" + +# +# N.B. it is not currently possible to properly typehint the loading functions, +# since we are obliged for backwards-compatibility to import and expose them in the +# iris main module API, but importing iris.cube here will cause a circular import. +# + +import contextlib +import itertools +from typing import Iterable + +from iris._combine import CombineOptions + + +def _generate_cubes(uris, callback, constraints): + import iris.io + + """Return a generator of cubes given the URIs and a callback.""" + if isinstance(uris, str) or not isinstance(uris, Iterable): + # Make a string, or other single item, into an iterable.
+ uris = [uris] + + # Group collections of uris by their iris handler + # Create list of tuples relating schemes to part names + uri_tuples = sorted(iris.io.decode_uri(uri) for uri in uris) + + for scheme, groups in itertools.groupby(uri_tuples, key=lambda x: x[0]): + # Call each scheme handler with the appropriate URIs + if scheme == "file": + part_names = [x[1] for x in groups] + for cube in iris.io.load_files(part_names, callback, constraints): + yield cube + elif scheme in ["http", "https"]: + urls = [":".join(x) for x in groups] + for cube in iris.io.load_http(urls, callback): + yield cube + elif scheme == "data": + data_objects = [x[1] for x in groups] + for cube in iris.io.load_data_objects(data_objects, callback): + yield cube + else: + raise ValueError("Iris cannot handle the URI scheme: %s" % scheme) + + +class _CubeFilter: + """A constraint, paired with a list of cubes matching that constraint.""" + + def __init__(self, constraint, cubes=None): + from iris.cube import CubeList + + self.constraint = constraint + if cubes is None: + cubes = CubeList() + self.cubes = cubes + + def __len__(self): + return len(self.cubes) + + def add(self, cube): + """Add the appropriate (sub)cube to the list of cubes where it matches the constraint.""" + sub_cube = self.constraint.extract(cube) + if sub_cube is not None: + self.cubes.append(sub_cube) + + def combined(self): + """Return a new :class:`_CubeFilter` by combining the list of cubes. + + Combines the list of cubes with :func:`~iris._combine_load_cubes`. 
+
+        """
+        from iris._combine._combine_functions import _combine_load_cubes
+
+        return _CubeFilter(
+            self.constraint,
+            _combine_load_cubes(self.cubes),
+        )
+
+
+class _CubeFilterCollection:
+    """A list of _CubeFilter instances."""
+
+    @staticmethod
+    def from_cubes(cubes, constraints=None):
+        """Create a new collection from an iterable of cubes, and some optional constraints."""
+        from iris._constraints import list_of_constraints
+
+        constraints = list_of_constraints(constraints)
+        pairs = [_CubeFilter(constraint) for constraint in constraints]
+        collection = _CubeFilterCollection(pairs)
+        for c in cubes:
+            collection.add_cube(c)
+        return collection
+
+    def __init__(self, pairs):
+        self.pairs = pairs
+
+    def add_cube(self, cube):
+        """Add the given :class:`~iris.cube.Cube` to all of the relevant constraint pairs."""
+        for pair in self.pairs:
+            pair.add(cube)
+
+    def cubes(self):
+        """Return all the cubes in this collection in a single :class:`CubeList`."""
+        from iris.cube import CubeList
+
+        result = CubeList()
+        for pair in self.pairs:
+            result.extend(pair.cubes)
+        return result
+
+    def combined(self):
+        """Return a new :class:`_CubeFilterCollection` by combining all the cube lists of this collection.
+
+        Combines each list of cubes using :func:`~iris._combine_load_cubes`.
+
+        """
+        return _CubeFilterCollection([pair.combined() for pair in self.pairs])
+
+
+def _load_collection(uris, constraints=None, callback=None):
+    import iris.exceptions
+    from iris.fileformats.rules import _MULTIREF_DETECTION
+
+    try:
+        # This routine is called once per iris load operation.
+        # Control of the "multiple refs" handling is implicit in this routine
+        # NOTE: detection of multiple reference fields, and its enabling of post-load
+        # concatenation, is triggered **per-load, not per-cube**
+        # This behaves unexpectedly for "iris.load_cubes" : a post-concatenation is
+        # triggered for all cubes or none, not per-cube (i.e. per constraint).
+        _MULTIREF_DETECTION.found_multiple_refs = False
+
+        cubes = _generate_cubes(uris, callback, constraints)
+        result = _CubeFilterCollection.from_cubes(cubes, constraints)
+    except EOFError as e:
+        raise iris.exceptions.TranslationError(
+            "The file appears empty or incomplete: {!r}".format(str(e))
+        )
+    return result
+
+
+def load(uris, constraints=None, callback=None):
+    """Load any number of Cubes for each constraint.
+
+    For a full description of the arguments, please see the module
+    documentation for :mod:`iris`.
+
+    Parameters
+    ----------
+    uris : str or :class:`pathlib.PurePath`
+        One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
+        If supplying a URL, only OPeNDAP Data Sources are supported.
+    constraints : optional
+        One or more constraints.
+    callback : optional
+        A modifier/filter function.
+
+    Returns
+    -------
+    :class:`iris.cube.CubeList`
+        An :class:`iris.cube.CubeList`. Note that there is no inherent order
+        to this :class:`iris.cube.CubeList` and it should be treated as if it
+        were random.
+
+    """
+    cubes = _load_collection(uris, constraints, callback).combined().cubes()
+    return cubes
+
+
+def load_cube(uris, constraint=None, callback=None):
+    """Load a single cube.
+
+    For a full description of the arguments, please see the module
+    documentation for :mod:`iris`.
+
+    Parameters
+    ----------
+    uris :
+        One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
+        If supplying a URL, only OPeNDAP Data Sources are supported.
+    constraint : optional
+        A constraint.
+    callback : optional
+        A modifier/filter function.
+ + Returns + ------- + :class:`iris.cube.Cube` + + """ + import iris._constraints + import iris.exceptions + + constraints = iris._constraints.list_of_constraints(constraint) + if len(constraints) != 1: + raise ValueError("only a single constraint is allowed") + + cubes = _load_collection(uris, constraints, callback).combined().cubes() + + try: + # NOTE: this call currently retained to preserve the legacy exceptions + # TODO: replace with simple testing to duplicate the relevant error cases + cube = cubes.merge_cube() + except iris.exceptions.MergeError as e: + raise iris.exceptions.ConstraintMismatchError(str(e)) + except ValueError: + raise iris.exceptions.ConstraintMismatchError("no cubes found") + + return cube + + +def load_cubes(uris, constraints=None, callback=None): + """Load exactly one Cube for each constraint. + + For a full description of the arguments, please see the module + documentation for :mod:`iris`. + + Parameters + ---------- + uris : + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. + constraints : optional + One or more constraints. + callback : optional + A modifier/filter function. + + Returns + ------- + :class:`iris.cube.CubeList` + An :class:`iris.cube.CubeList`. Note that there is no inherent order + to this :class:`iris.cube.CubeList` and it should be treated as if it + were random. 
+ + """ + import iris.exceptions + + # Merge the incoming cubes + collection = _load_collection(uris, constraints, callback).combined() + + # Make sure we have exactly one merged cube per constraint + bad_pairs = [pair for pair in collection.pairs if len(pair) != 1] + if bad_pairs: + fmt = " {} -> {} cubes" + bits = [fmt.format(pair.constraint, len(pair)) for pair in bad_pairs] + msg = "\n" + "\n".join(bits) + raise iris.exceptions.ConstraintMismatchError(msg) + + return collection.cubes() + + +def load_raw(uris, constraints=None, callback=None): + """Load non-merged cubes. + + This function is provided for those occasions where the automatic + combination of cubes into higher-dimensional cubes is undesirable. + However, it is intended as a tool of last resort! If you experience + a problem with the automatic combination process then please raise + an issue with the Iris developers. + + For a full description of the arguments, please see the module + documentation for :mod:`iris`. + + Parameters + ---------- + uris : + One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. + constraints : optional + One or more constraints. + callback : optional + A modifier/filter function. + + Returns + ------- + :class:`iris.cube.CubeList` + + """ + from iris.fileformats.um._fast_load import _raw_structured_loading + + with _raw_structured_loading(): + return _load_collection(uris, constraints, callback).cubes() + + +class LoadPolicy(CombineOptions): + """A control object for Iris loading options. + + Incorporates all the settings of a :class:`~iris.CombineOptions`, and adds the + ``support_multiple_references`` control. 
+
+    In addition to controlling "combine" operation during loading, this also controls
+    the detection and handling of cases where a hybrid coordinate uses multiple
+    reference fields during loading : for example, a UM file which contains a series of
+    fields describing a time-varying orography.
+
+    Options can be set directly, or via :meth:`~iris.LoadPolicy.set`, or changed for
+    the scope of a code block with :meth:`~iris.LoadPolicy.context`.
+
+    .. note ::
+
+        The default behaviour will "fix" loading for cases like the time-varying
+        orography case described above. However, this is not strictly
+        backwards-compatible. If this causes problems, you can force identical loading
+        behaviour to earlier Iris versions with ``LOAD_POLICY.set("legacy")`` or
+        equivalent.
+
+    .. testsetup::
+
+        from iris import LOAD_POLICY
+
+    Examples
+    --------
+    >>> LOAD_POLICY.set("legacy")
+    >>> print(LOAD_POLICY)
+    LoadPolicy(support_multiple_references=False, merge_concat_sequence='m', repeat_until_unchanged=False)
+    >>> LOAD_POLICY.support_multiple_references = True
+    >>> print(LOAD_POLICY)
+    LoadPolicy(support_multiple_references=True, merge_concat_sequence='m', repeat_until_unchanged=False)
+    >>> LOAD_POLICY.set(merge_concat_sequence="cm")
+    >>> print(LOAD_POLICY)
+    LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False)
+    >>> with LOAD_POLICY.context("comprehensive"):
+    ...
print(LOAD_POLICY) + LoadPolicy(support_multiple_references=True, merge_concat_sequence='mc', repeat_until_unchanged=True) + >>> print(LOAD_POLICY) + LoadPolicy(support_multiple_references=True, merge_concat_sequence='cm', repeat_until_unchanged=False) + + """ + + OPTION_KEYS = ("support_multiple_references",) + CombineOptions.OPTION_KEYS + # allowed values are as for CombineOptions, plus boolean values for multiple-refs + _OPTIONS_ALLOWED_VALUES = dict( + list(CombineOptions._OPTIONS_ALLOWED_VALUES.items()) + + [("support_multiple_references", (True, False))] + ) + # Settings are as for CombineOptions, but all with multiple load references enabled + SETTINGS = { + key: dict(list(settings.items()) + [("support_multiple_references", True)]) + for key, settings in CombineOptions.SETTINGS.items() + } + + @contextlib.contextmanager + def context(self, settings=None, **kwargs): + """Return a context manager applying given options. + + Parameters + ---------- + settings : str or dict + Options dictionary or name, as for :meth:`~LoadPolicy.set`. + kwargs : dict + Option values, as for :meth:`~LoadPolicy.set`. + + Examples + -------- + .. testsetup:: + + import iris + from iris import LOAD_POLICY, sample_data_path + + >>> # Show how a CombineOptions acts in the context of a load operation + >>> path = sample_data_path("time_varying_hybrid_height", "*.pp") + >>> # "legacy" load behaviour allows merge but not concatenate + >>> with LOAD_POLICY.context("legacy"): + ... cubes = iris.load(path, "x_wind") + >>> print(cubes) + 0: x_wind / (m s-1) (time: 2; model_level_number: 5; latitude: 144; longitude: 192) + 1: x_wind / (m s-1) (time: 12; model_level_number: 5; latitude: 144; longitude: 192) + 2: x_wind / (m s-1) (model_level_number: 5; latitude: 144; longitude: 192) + >>> + >>> # "recommended" behaviour enables concatenation also + >>> with LOAD_POLICY.context("recommended"): + ... 
cubes = iris.load(path, "x_wind") + >>> print(cubes) + 0: x_wind / (m s-1) (model_level_number: 5; time: 15; latitude: 144; longitude: 192) + """ + # Save the current state + saved_settings = self.settings() + + # Apply the new options and execute the context + try: + self.set(settings, **kwargs) + yield + finally: + # Re-establish the former state + self.set(saved_settings) + + +#: A control object containing the current file loading strategy options. +LOAD_POLICY = LoadPolicy() diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py index 69d2d229e1..78ab1b20c3 100644 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ b/lib/iris/tests/unit/common/lenient/test__qualname.py @@ -34,7 +34,7 @@ def test_callable_function(self): import iris result = _qualname(iris.load) - assert result == "iris.load" + assert result == "iris.loading.load" def test_callable_method_local(self): class MyClass: diff --git a/lib/iris/tests/unit/fileformats/test_load_functions.py b/lib/iris/tests/unit/fileformats/test_load_functions.py index 3c3d361080..5f353e4910 100644 --- a/lib/iris/tests/unit/fileformats/test_load_functions.py +++ b/lib/iris/tests/unit/fileformats/test_load_functions.py @@ -20,6 +20,7 @@ import iris from iris.coords import AuxCoord, DimCoord from iris.cube import Cube +import iris.loading _time_unit = "days since 2001-01-01" @@ -64,14 +65,14 @@ def loadfunc_name(request): def run_testcase(input_cubes, loadfunc_name, constraints=None): - loadfunc = getattr(iris, loadfunc_name) + loadfunc = getattr(iris.loading, loadfunc_name) def mock_generate_cubes(uris, callback, constraints): for cube in input_cubes: yield cube try: - with mock.patch("iris._generate_cubes", mock_generate_cubes): + with mock.patch("iris.loading._generate_cubes", mock_generate_cubes): result = loadfunc(input_cubes, constraints) except Exception as e: result = e diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py 
b/lib/iris/tests/unit/io/test__generate_cubes.py index 96d790db2d..4b0e1769e3 100644 --- a/lib/iris/tests/unit/io/test__generate_cubes.py +++ b/lib/iris/tests/unit/io/test__generate_cubes.py @@ -10,7 +10,7 @@ from pathlib import Path -import iris +from iris.loading import _generate_cubes class TestGenerateCubes(tests.IrisTest): @@ -28,7 +28,7 @@ def test_pathlib_paths(self): for gc_arg, du_arg in test_variants: decode_uri_mock.reset_mock() - list(iris._generate_cubes(gc_arg, None, None)) + list(_generate_cubes(gc_arg, None, None)) decode_uri_mock.assert_called_with(du_arg) diff --git a/lib/iris/tests/unit/test_combine_cubes.py b/lib/iris/tests/unit/test_combine_cubes.py index e159582497..7ff3a8dec9 100644 --- a/lib/iris/tests/unit/test_combine_cubes.py +++ b/lib/iris/tests/unit/test_combine_cubes.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for the :func:`iris.io.loading.combine_cubes` function. +"""Unit tests for the :func:`iris.loading.combine_cubes` function. Note: These tests are fairly extensive to cover functional uses within the loading operations. @@ -12,22 +12,23 @@ import pytest -from iris import LoadPolicy, _combine_cubes +from iris._combine import CombineOptions from iris.tests.unit.fileformats.test_load_functions import cu +from iris.util import combine_cubes -@pytest.fixture(params=list(LoadPolicy.SETTINGS.keys())) +@pytest.fixture(params=list(CombineOptions.SETTINGS.keys())) def options(request): # N.B. "request" is a standard PyTest fixture return request.param # Return the name of the attribute to test. -# Interface to convert settings-name / kwargs into an options dict, -# TODO: remove this wrapper when the API of "combine_cubes" is opened up. 
-def combine_cubes(cubes, settings_name="default", **kwargs): - options = LoadPolicy.SETTINGS[settings_name] - options.update(kwargs) - return _combine_cubes(cubes, options, merge_require_unique=False) +# # Interface to convert settings-name / kwargs into an options dict, +# # TODO: remove this wrapper when the API of "combine_cubes" is opened up. +# def combine_cubes(cubes, settings_name="default", **kwargs): +# options = LoadPolicy.SETTINGS[settings_name] +# options.update(kwargs) +# return _combine_cubes(cubes, options) class Test: diff --git a/lib/iris/util.py b/lib/iris/util.py index dfefb504e9..1722997d4c 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -15,7 +15,7 @@ import os.path import sys import tempfile -from typing import Literal +from typing import List, Literal from warnings import warn import cf_units @@ -23,6 +23,7 @@ import numpy as np import numpy.ma as ma +import iris from iris._deprecation import warn_deprecated from iris._lazy_data import is_lazy_data, is_lazy_masked_data from iris._shapefiles import create_shapefile_mask @@ -2320,3 +2321,70 @@ def equalise_cubes( # Return a CubeList result = the *original* cubes, as modified result = CubeList(cubes) return result + + +def combine_cubes( + cubes: List[iris.cube.Cube], + options: str | dict | None = None, + **kwargs, +): + """Combine cubes, according to "combine options". + + Applies a combination of :meth:`~iris.cube.CubeList.merge` and/or + :meth:`~iris.cube.CubeList.concatenate` steps to the given cubes, + as determined by the given settings (from 'options' and 'kwargs'). + + Parameters + ---------- + cubes : list of :class:`~iris.cube.Cube` + A list of cubes to combine. + + options : str or dict, optional + Name of one of the :class:`iris.CombineOptions.SETTINGS`, or a dictionary of + settings options, as described for :class:`~iris.CombineOptions`. + Defaults to the current state of :data:`iris.LOAD_POLICY`. + + kwargs : dict + Individual option setting values. 
These take precedence over those defined by + the 'options' arg, as described for :meth:`~iris.CombineOptions.set`. + + Returns + ------- + :class:`~iris.cube.CubeList` + + .. Note:: + The ``support_multiple_references`` keyword/property has *no* effect on + :func:`combine_cubes` : this only acts during load operations. + + Examples + -------- + >>> results = combine_cubes(cubes) + >>> results = combine_cubes(cubes, options=CombineOptions("recommended")) + >>> results = combine_cubes(cubes, repeat_until_unchanged=True) + + """ + # TODO: somehow make a real + useful example + + from iris import LOAD_POLICY, CombineOptions + from iris._combine._combine_functions import _combine_cubes_inner + + err = None + opts_dict = {} + if options is None: + opts_dict = LOAD_POLICY.settings().copy() + elif isinstance(options, str): + if options in CombineOptions.SETTINGS: + opts_dict = CombineOptions.SETTINGS[options].copy() + else: + err = ( + "Unrecognised settings name : expected one of " + f"{tuple(CombineOptions.SETTINGS)}." + ) + + if err: + raise ValueError(err) + + if kwargs is not None: + opts_dict.update(kwargs) + + return _combine_cubes_inner(cubes, opts_dict)