diff --git a/.cirrus.yml b/.cirrus.yml index b728e31867..b837436696 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -38,7 +38,7 @@ env: # Conda packages to be installed. CONDA_CACHE_PACKAGES: "nox pip" # Git commit hash for iris test data. - IRIS_TEST_DATA_VERSION: "2.0.0" + IRIS_TEST_DATA_VERSION: "2.2" # Base directory for the iris-test-data. IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data @@ -193,4 +193,4 @@ linkcheck_task: - mkdir -p ${MPL_RC_DIR} - echo "backend : agg" > ${MPL_RC_FILE} - echo "image.cmap : viridis" >> ${MPL_RC_FILE} - - nox --session linkcheck -- --verbose + - nox --session linkcheck -- --verbose \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in index 62f9dc701b..52492b17b2 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,6 +4,7 @@ include CHANGES COPYING COPYING.LESSER # Files from setup.py package_data that are not automatically added to source distributions recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json recursive-include lib/iris/etc * +include lib/iris/tests/stock/file_headers/* recursive-include requirements * diff --git a/docs/src/sphinxext/generate_package_rst.py b/docs/src/sphinxext/generate_package_rst.py index ec153c768f..8f4119944f 100644 --- a/docs/src/sphinxext/generate_package_rst.py +++ b/docs/src/sphinxext/generate_package_rst.py @@ -13,7 +13,7 @@ # list of tuples for modules to exclude. Useful if the documentation throws # warnings, especially for experimental modules. 
exclude_modules = [ - ("experimental/raster", "iris.experimental.raster") # gdal conflicts + ("experimental/raster", "iris.experimental.raster"), # gdal conflicts ] diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst index 4eb016078e..d62a056f33 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -23,9 +23,9 @@ Collapsing Entire Data Dimensions In the :doc:`subsetting_a_cube` section we saw how to extract a subset of a cube in order to reduce either its dimensionality or its resolution. -Instead of simply extracting a sub-region of the data, -we can produce statistical functions of the data values -across a particular dimension, +Instead of simply extracting a sub-region of the data, +we can produce statistical functions of the data values +across a particular dimension, such as a 'mean over time' or 'minimum over latitude'. .. _cube-statistics_forecast_printout: @@ -57,9 +57,9 @@ For instance, suppose we have a cube: um_version: 7.3 -In this case we have a 4 dimensional cube; -to mean the vertical (z) dimension down to a single valued extent -we can pass the coordinate name and the aggregation definition to the +In this case we have a 4 dimensional cube; +to mean the vertical (z) dimension down to a single valued extent +we can pass the coordinate name and the aggregation definition to the :meth:`Cube.collapsed() ` method: >>> import iris.analysis @@ -88,8 +88,8 @@ we can pass the coordinate name and the aggregation definition to the mean: model_level_number -Similarly other analysis operators such as ``MAX``, ``MIN`` and ``STD_DEV`` -can be used instead of ``MEAN``, see :mod:`iris.analysis` for a full list +Similarly other analysis operators such as ``MAX``, ``MIN`` and ``STD_DEV`` +can be used instead of ``MEAN``, see :mod:`iris.analysis` for a full list of currently supported operators. 
For an example of using this functionality, the @@ -103,14 +103,14 @@ in the gallery takes a zonal mean of an ``XYT`` cube by using the Area Averaging ^^^^^^^^^^^^^^ -Some operators support additional keywords to the ``cube.collapsed`` method. -For example, :func:`iris.analysis.MEAN ` supports -a weights keyword which can be combined with +Some operators support additional keywords to the ``cube.collapsed`` method. +For example, :func:`iris.analysis.MEAN ` supports +a weights keyword which can be combined with :func:`iris.analysis.cartography.area_weights` to calculate an area average. -Let's use the same data as was loaded in the previous example. -Since ``grid_latitude`` and ``grid_longitude`` were both point coordinates -we must guess bound positions for them +Let's use the same data as was loaded in the previous example. +Since ``grid_latitude`` and ``grid_longitude`` were both point coordinates +we must guess bound positions for them in order to calculate the area of the grid boxes:: import iris.analysis.cartography @@ -155,24 +155,24 @@ including an example on taking a :ref:`global area-weighted mean Partially Reducing Data Dimensions ---------------------------------- -Instead of completely collapsing a dimension, other methods can be applied -to reduce or filter the number of data points of a particular dimension. +Instead of completely collapsing a dimension, other methods can be applied +to reduce or filter the number of data points of a particular dimension. Aggregation of Grouped Data ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The :meth:`Cube.aggregated_by ` operation -combines data for all points with the same value of a given coordinate. -To do this, you need a coordinate whose points take on only a limited set -of different values -- the *number* of these then determines the size of the +The :meth:`Cube.aggregated_by ` operation +combines data for all points with the same value of a given coordinate. 
+To do this, you need a coordinate whose points take on only a limited set +of different values -- the *number* of these then determines the size of the reduced dimension. -The :mod:`iris.coord_categorisation` module can be used to make such -'categorical' coordinates out of ordinary ones: The most common use is -to aggregate data over regular *time intervals*, +The :mod:`iris.coord_categorisation` module can be used to make such +'categorical' coordinates out of ordinary ones: The most common use is +to aggregate data over regular *time intervals*, such as by calendar month or day of the week. -For example, let's create two new coordinates on the cube +For example, let's create two new coordinates on the cube to represent the climatological seasons and the season year respectively:: import iris @@ -188,8 +188,8 @@ to represent the climatological seasons and the season year respectively:: .. note:: - The 'season year' is not the same as year number, because (e.g.) the months - Dec11, Jan12 + Feb12 all belong to 'DJF-12'. + The 'season year' is not the same as year number, because (e.g.) the months + Dec11, Jan12 + Feb12 all belong to 'DJF-12'. See :meth:`iris.coord_categorisation.add_season_year`. @@ -206,10 +206,10 @@ to represent the climatological seasons and the season year respectively:: iris.coord_categorisation.add_season_year(cube, 'time', name='season_year') annual_seasonal_mean = cube.aggregated_by( - ['clim_season', 'season_year'], + ['clim_season', 'season_year'], iris.analysis.MEAN) - + Printing this cube now shows that two extra coordinates exist on the cube: .. doctest:: aggregation @@ -238,20 +238,20 @@ These two coordinates can now be used to aggregate by season and climate-year: .. doctest:: aggregation >>> annual_seasonal_mean = cube.aggregated_by( - ... ['clim_season', 'season_year'], + ... ['clim_season', 'season_year'], ... 
iris.analysis.MEAN) >>> print(repr(annual_seasonal_mean)) - -The primary change in the cube is that the cube's data has been -reduced in the 'time' dimension by aggregation (taking means, in this case). -This has collected together all data points with the same values of season and + +The primary change in the cube is that the cube's data has been +reduced in the 'time' dimension by aggregation (taking means, in this case). +This has collected together all data points with the same values of season and season-year. The results are now indexed by the 19 different possible values of season and season-year in a new, reduced 'time' dimension. -We can see this by printing the first 10 values of season+year -from the original cube: These points are individual months, +We can see this by printing the first 10 values of season+year +from the original cube: These points are individual months, so adjacent ones are often in the same season: .. doctest:: aggregation @@ -271,7 +271,7 @@ so adjacent ones are often in the same season: djf 2007 djf 2007 -Compare this with the first 10 values of the new cube's coordinates: +Compare this with the first 10 values of the new cube's coordinates: All the points now have distinct season+year values: .. doctest:: aggregation @@ -294,7 +294,7 @@ All the points now have distinct season+year values: Because the original data started in April 2006 we have some incomplete seasons (e.g. there were only two months worth of data for 'mam-2006'). -In this case we can fix this by removing all of the resultant 'times' which +In this case we can fix this by removing all of the resultant 'times' which do not cover a three month period (note: judged here as > 3*28 days): .. 
doctest:: aggregation @@ -306,7 +306,7 @@ do not cover a three month period (note: judged here as > 3*28 days): >>> full_season_means -The final result now represents the seasonal mean temperature for 17 seasons +The final result now represents the seasonal mean temperature for 17 seasons from jja-2006 to jja-2010: .. doctest:: aggregation diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 4fcca9b4c0..d13af14d39 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -26,10 +26,6 @@ from .lenient import _qualname as qualname __all__ = [ - "SERVICES_COMBINE", - "SERVICES_DIFFERENCE", - "SERVICES_EQUAL", - "SERVICES", "AncillaryVariableMetadata", "BaseMetadata", "CellMeasureMetadata", @@ -37,11 +33,19 @@ "CubeMetadata", "DimCoordMetadata", "hexdigest", + "metadata_filter", "metadata_manager_factory", + "SERVICES", + "SERVICES_COMBINE", + "SERVICES_DIFFERENCE", + "SERVICES_EQUAL", ] # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name + +from ..util import guess_coord_axis + _TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""") # Configure the logger. @@ -193,9 +197,19 @@ def func(field): return result # Note that, for strict we use "_fields" not "_members". - # The "circular" member does not participate in strict equivalence. + # TODO: refactor so that 'non-participants' can be held in their specific subclasses. + # Certain members never participate in strict equivalence, so + # are filtered out. 
fields = filter( - lambda field: field != "circular", self._fields + lambda field: field + not in ( + "circular", + "src_dim", + "node_dimension", + "edge_dimension", + "face_dimension", + ), + self._fields, ) result = all([func(field) for field in fields]) @@ -1338,6 +1352,149 @@ def equal(self, other, lenient=None): return super().equal(other, lenient=lenient) +def metadata_filter( + instances, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, +): + """ + Filter a collection of objects by their metadata to fit the given metadata + criteria. + + Criteria can be either specific properties or other objects with metadata + to be matched. + + Args: + + * instances: + One or more objects to be filtered. + + Kwargs: + + * item: + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is compared + against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a coordinate or metadata instance equal to that of + the desired objects e.g., :class:`~iris.coords.DimCoord` + or :class:`CoordMetadata`. + + * standard_name: + The CF standard name of the desired object. If ``None``, does not + check for ``standard_name``. + + * long_name: + An unconstrained description of the object. If ``None``, does not + check for ``long_name``. + + * var_name: + The NetCDF variable name of the desired object. If ``None``, does + not check for ``var_name``. + + * attributes: + A dictionary of attributes desired on the object. If ``None``, + does not check for ``attributes``. + + * axis: + The desired object's axis, see :func:`~iris.util.guess_coord_axis`. + If ``None``, does not check for ``axis``. Accepts the values ``X``, + ``Y``, ``Z`` and ``T`` (case-insensitive). + + Returns: + A list of the objects supplied in the ``instances`` argument, limited + to only those that matched the given criteria. 
+ + """ + name = None + obj = None + + if isinstance(item, str): + name = item + else: + obj = item + + # apply de morgan's law for one less logical operation + if not (isinstance(instances, str) or isinstance(instances, Iterable)): + instances = [instances] + + result = instances + + if name is not None: + result = [instance for instance in result if instance.name() == name] + + if standard_name is not None: + result = [ + instance + for instance in result + if instance.standard_name == standard_name + ] + + if long_name is not None: + result = [ + instance for instance in result if instance.long_name == long_name + ] + + if var_name is not None: + result = [ + instance for instance in result if instance.var_name == var_name + ] + + if attributes is not None: + if not isinstance(attributes, Mapping): + msg = ( + "The attributes keyword was expecting a dictionary " + "type, but got a %s instead." % type(attributes) + ) + raise ValueError(msg) + + def attr_filter(instance): + return all( + k in instance.attributes + and hexdigest(instance.attributes[k]) == hexdigest(v) + for k, v in attributes.items() + ) + + result = [instance for instance in result if attr_filter(instance)] + + if axis is not None: + axis = axis.upper() + + def get_axis(instance): + if hasattr(instance, "axis"): + axis = instance.axis.upper() + else: + axis = guess_coord_axis(instance) + return axis + + result = [ + instance for instance in result if get_axis(instance) == axis + ] + + if obj is not None: + if hasattr(obj, "__class__") and issubclass( + obj.__class__, BaseMetadata + ): + target_metadata = obj + else: + target_metadata = obj.metadata + + result = [ + instance + for instance in result + if instance.metadata == target_metadata + ] + + return result + + def metadata_manager_factory(cls, **kwargs): """ A class instance factory function responsible for manufacturing @@ -1477,29 +1634,31 @@ def values(self): #: Convenience collection of lenient metadata combine services. 
-SERVICES_COMBINE = ( +# TODO: change lists back to tuples once CellMeasureMetadata is re-integrated +# here (currently in experimental.ugrid). +SERVICES_COMBINE = [ AncillaryVariableMetadata.combine, BaseMetadata.combine, CellMeasureMetadata.combine, CoordMetadata.combine, CubeMetadata.combine, DimCoordMetadata.combine, -) +] #: Convenience collection of lenient metadata difference services. -SERVICES_DIFFERENCE = ( +SERVICES_DIFFERENCE = [ AncillaryVariableMetadata.difference, BaseMetadata.difference, CellMeasureMetadata.difference, CoordMetadata.difference, CubeMetadata.difference, DimCoordMetadata.difference, -) +] #: Convenience collection of lenient metadata equality services. -SERVICES_EQUAL = ( +SERVICES_EQUAL = [ AncillaryVariableMetadata.__eq__, AncillaryVariableMetadata.equal, BaseMetadata.__eq__, @@ -1512,7 +1671,7 @@ def values(self): CubeMetadata.equal, DimCoordMetadata.__eq__, DimCoordMetadata.equal, -) +] #: Convenience collection of lenient metadata services. diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 74cedbaa00..e5c3ce0ca0 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -18,6 +18,7 @@ import zlib import cftime +import dask.array as da import numpy as np import numpy.ma as ma @@ -588,8 +589,7 @@ def xml_element(self, doc): Returns: The :class:`xml.dom.minidom.Element` that will describe this - :class:`_DimensionalMetadata`, and the dictionary of attributes - that require to be added to this element. + :class:`_DimensionalMetadata`. """ # Create the XML element as the camelCaseEquivalent of the @@ -636,6 +636,10 @@ def xml_element(self, doc): # otherwise. if isinstance(self, Coord): values_term = "points" + # TODO: replace with isinstance(self, Connectivity) once Connectivity + # is re-integrated here (currently in experimental.ugrid). 
+ elif hasattr(self, "indices"): + values_term = "indices" else: values_term = "data" element.setAttribute(values_term, self._xml_array_repr(self._values)) @@ -1940,7 +1944,6 @@ def collapsed(self, dims_to_collapse=None): Replaces the points & bounds with a simple bounded region. """ - import dask.array as da # Ensure dims_to_collapse is a tuple to be able to pass # through to numpy @@ -2266,8 +2269,7 @@ def xml_element(self, doc): Returns: The :class:`xml.dom.minidom.Element` that will describe this - :class:`DimCoord`, and the dictionary of attributes that require - to be added to this element. + :class:`DimCoord`. """ # Create the XML element as the camelCaseEquivalent of the diff --git a/lib/iris/cube.py b/lib/iris/cube.py index dc6f481d3c..d36387a8dc 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -10,13 +10,7 @@ """ from collections import OrderedDict -from collections.abc import ( - Container, - Iterable, - Iterator, - Mapping, - MutableMapping, -) +from collections.abc import Container, Iterable, Iterator, MutableMapping import copy from copy import deepcopy from functools import partial, reduce @@ -37,13 +31,8 @@ from iris.analysis.cartography import wrap_lons import iris.analysis.maths import iris.aux_factory -from iris.common import ( - CFVariableMixin, - CoordMetadata, - CubeMetadata, - DimCoordMetadata, - metadata_manager_factory, -) +from iris.common import CFVariableMixin, CubeMetadata, metadata_manager_factory +from iris.common.metadata import metadata_filter import iris.coord_systems import iris.coords import iris.exceptions @@ -859,7 +848,7 @@ def __init__( * long_name An unconstrained description of the cube. * var_name - The netCDF variable name for the cube. + The NetCDF variable name for the cube. * units The unit of the cube, e.g. ``"m s-1"`` or ``"kelvin"``. * attributes @@ -916,7 +905,7 @@ def __init__( #: The "long name" for the Cube's phenomenon. self.long_name = long_name - #: The netCDF variable name for the Cube. 
+ #: The NetCDF variable name for the Cube. self.var_name = var_name self.cell_methods = cell_methods @@ -1143,6 +1132,44 @@ def _check_multi_dim_metadata(self, metadata, data_dims): def _add_unique_aux_coord(self, coord, data_dims): data_dims = self._check_multi_dim_metadata(coord, data_dims) + if hasattr(coord, "mesh"): + mesh = self.mesh + if mesh: + msg = ( + "{item} of Meshcoord {coord!r} is " + "{thisval!r}, which does not match existing " + "cube {item} of {ownval!r}." + ) + if coord.mesh != mesh: + raise ValueError( + msg.format( + item="mesh", + coord=coord, + thisval=coord.mesh, + ownval=mesh, + ) + ) + location = self.location + if coord.location != location: + raise ValueError( + msg.format( + item="location", + coord=coord, + thisval=coord.location, + ownval=location, + ) + ) + mesh_dims = (self.mesh_dim(),) + if data_dims != mesh_dims: + raise ValueError( + msg.format( + item="mesh dimension", + coord=coord, + thisval=data_dims, + ownval=mesh_dims, + ) + ) + self._aux_coords_and_dims.append((coord, data_dims)) def add_aux_factory(self, aux_factory): @@ -1525,7 +1552,7 @@ def aux_factory( An unconstrained description of the coordinate factory. If None, does not check for long_name. * var_name - The netCDF variable name of the desired coordinate factory. + The NetCDF variable name of the desired coordinate factory. If None, does not check for var_name. .. note:: @@ -1593,69 +1620,82 @@ def coords( dimensions=None, coord_system=None, dim_coords=None, + mesh_coords=None, ): """ - Return a list of coordinates in this cube fitting the given criteria. + Return a list of coordinates from the :class:`Cube` that match the + provided criteria. + + .. seealso:: + + :meth:`Cube.coord` for matching exactly one coordinate. Kwargs: - * name_or_coord - Either + * name_or_coord: + Either, - (a) a :attr:`standard_name`, :attr:`long_name`, or - :attr:`var_name`. 
Defaults to value of `default` - (which itself defaults to `unknown`) as defined in - :class:`iris.common.CFVariableMixin`. + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. - (b) a coordinate instance with metadata equal to that of - the desired coordinates. Accepts either a - :class:`iris.coords.DimCoord`, :class:`iris.coords.AuxCoord`, - :class:`iris.aux_factory.AuxCoordFactory`, - :class:`iris.common.CoordMetadata` or - :class:`iris.common.DimCoordMetadata`. - * standard_name - The CF standard name of the desired coordinate. If None, does not - check for standard name. - * long_name - An unconstrained description of the coordinate. If None, does not - check for long_name. - * var_name - The netCDF variable name of the desired coordinate. If None, does - not check for var_name. - * attributes - A dictionary of attributes desired on the coordinates. If None, - does not check for attributes. - * axis - The desired coordinate axis, see - :func:`iris.util.guess_coord_axis`. If None, does not check for - axis. Accepts the values 'X', 'Y', 'Z' and 'T' (case-insensitive). - * contains_dimension - The desired coordinate contains the data dimension. If None, does + * a coordinate or metadata instance equal to that of the desired + coordinate e.g., :class:`~iris.coords.DimCoord` or + :class:`~iris.common.metadata.CoordMetadata`. + + * standard_name: + The CF standard name of the desired coordinate. If ``None``, does not + check for ``standard name``. + + * long_name: + An unconstrained description of the coordinate. If ``None``, does not + check for ``long_name``. + + * var_name: + The NetCDF variable name of the desired coordinate. If ``None``, does + not check for ``var_name``. + + * attributes: + A dictionary of attributes desired on the coordinates. 
If ``None``, + does not check for ``attributes``. + + * axis: + The desired coordinate axis, see :func:`iris.util.guess_coord_axis`. + If ``None``, does not check for ``axis``. Accepts the values ``X``, + ``Y``, ``Z`` and ``T`` (case-insensitive). + + * contains_dimension: + The desired coordinate contains the data dimension. If ``None``, does not check for the dimension. - * dimensions + + * dimensions: The exact data dimensions of the desired coordinate. Coordinates - with no data dimension can be found with an empty tuple or list - (i.e. ``()`` or ``[]``). If None, does not check for dimensions. - * coord_system - Whether the desired coordinates have coordinate systems equal to - the given coordinate system. If None, no check is done. - * dim_coords - Set to True to only return coordinates that are the cube's - dimension coordinates. Set to False to only return coordinates - that are the cube's auxiliary and derived coordinates. If None, - returns all coordinates. + with no data dimension can be found with an empty ``tuple`` or + ``list`` i.e., ``()`` or ``[]``. If ``None``, does not check for + dimensions. + + * coord_system: + Whether the desired coordinates have a coordinate system equal to + the given coordinate system. If ``None``, no check is done. + + * dim_coords: + Set to ``True`` to only return coordinates that are the cube's + dimension coordinates. Set to ``False`` to only return coordinates + that are the cube's auxiliary, mesh and derived coordinates. + If ``None``, returns all coordinates. + + * mesh_coords: + Set to ``True`` to return only coordinates which are + :class:`~iris.experimental.ugrid.MeshCoord`\\ s. + Set to ``False`` to return only non-mesh coordinates. + If ``None``, returns all coordinates. - See also :meth:`Cube.coord()`. + Returns: + A list containing zero or more coordinates matching the provided + criteria. 
""" - name = None - coord = None - - if isinstance(name_or_coord, str): - name = name_or_coord - else: - coord = name_or_coord - coords_and_factories = [] if dim_coords in [True, None]: @@ -1665,62 +1705,35 @@ def coords( coords_and_factories += list(self.aux_coords) coords_and_factories += list(self.aux_factories) - if name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.name() == name - ] - - if standard_name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.standard_name == standard_name - ] - - if long_name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.long_name == long_name - ] - - if var_name is not None: - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.var_name == var_name - ] - - if axis is not None: - axis = axis.upper() - guess_axis = iris.util.guess_coord_axis - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if guess_axis(coord_) == axis - ] - - if attributes is not None: - if not isinstance(attributes, Mapping): - msg = ( - "The attributes keyword was expecting a dictionary " - "type, but got a %s instead." % type(attributes) - ) - raise ValueError(msg) - - def attr_filter(coord_): - return all( - k in coord_.attributes and coord_.attributes[k] == v - for k, v in attributes.items() - ) + if mesh_coords is not None: + # Select on mesh or non-mesh. + mesh_coords = bool(mesh_coords) + # Use duck typing to avoid importing from iris.experimental.ugrid, + # which could be a circular import. 
+ if mesh_coords: + # *only* MeshCoords + coords_and_factories = [ + item + for item in coords_and_factories + if hasattr(item, "mesh") + ] + else: + # *not* MeshCoords + coords_and_factories = [ + item + for item in coords_and_factories + if not hasattr(item, "mesh") + ] - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if attr_filter(coord_) - ] + coords_and_factories = metadata_filter( + coords_and_factories, + item=name_or_coord, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + ) if coord_system is not None: coords_and_factories = [ @@ -1729,20 +1742,6 @@ def attr_filter(coord_): if coord_.coord_system == coord_system ] - if coord is not None: - if hasattr(coord, "__class__") and coord.__class__ in ( - CoordMetadata, - DimCoordMetadata, - ): - target_metadata = coord - else: - target_metadata = coord.metadata - coords_and_factories = [ - coord_ - for coord_ in coords_and_factories - if coord_.metadata == target_metadata - ] - if contains_dimension is not None: coords_and_factories = [ coord_ @@ -1793,20 +1792,84 @@ def coord( dimensions=None, coord_system=None, dim_coords=None, + mesh_coords=None, ): """ - Return a single coord given the same arguments as :meth:`Cube.coords`. + Return a single coordinate from the :class:`Cube` that matches the + provided criteria. .. note:: - If the arguments given do not result in precisely 1 coordinate - being matched, an :class:`iris.exceptions.CoordinateNotFoundError` - is raised. + If the arguments given do not result in **precisely one** coordinate, + then a :class:`~iris.exceptions.CoordinateNotFoundError` is raised. .. seealso:: - :meth:`Cube.coords()` for full keyword - documentation. + :meth:`Cube.coords` for matching zero or more coordinates. 
+ + Kwargs: + + * name_or_coord: + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a coordinate or metadata instance equal to that of the desired + coordinate e.g., :class:`~iris.coords.DimCoord` or + :class:`~iris.common.metadata.CoordMetadata`. + + * standard_name: + The CF standard name of the desired coordinate. If ``None``, does not + check for ``standard name``. + + * long_name: + An unconstrained description of the coordinate. If ``None``, does not + check for ``long_name``. + + * var_name: + The NetCDF variable name of the desired coordinate. If ``None``, does + not check for ``var_name``. + + * attributes: + A dictionary of attributes desired on the coordinates. If ``None``, + does not check for ``attributes``. + + * axis: + The desired coordinate axis, see :func:`iris.util.guess_coord_axis`. + If ``None``, does not check for ``axis``. Accepts the values ``X``, + ``Y``, ``Z`` and ``T`` (case-insensitive). + + * contains_dimension: + The desired coordinate contains the data dimension. If ``None``, does + not check for the dimension. + + * dimensions: + The exact data dimensions of the desired coordinate. Coordinates + with no data dimension can be found with an empty ``tuple`` or + ``list`` i.e., ``()`` or ``[]``. If ``None``, does not check for + dimensions. + + * coord_system: + Whether the desired coordinates have a coordinate system equal to + the given coordinate system. If ``None``, no check is done. + + * dim_coords: + Set to ``True`` to only return coordinates that are the cube's + dimension coordinates. Set to ``False`` to only return coordinates + that are the cube's auxiliary, mesh and derived coordinates. + If ``None``, returns all coordinates. 
+ + * mesh_coords: + Set to ``True`` to return only coordinates which are + :class:`~iris.experimental.ugrid.MeshCoord`\\ s. + Set to ``False`` to return only non-mesh coordinates. + If ``None``, returns all coordinates. + + Returns: + The coordinate that matches the provided criteria. """ coords = self.coords( @@ -1823,23 +1886,22 @@ def coord( ) if len(coords) > 1: - msg = ( - "Expected to find exactly 1 coordinate, but found %s. " - "They were: %s." - % (len(coords), ", ".join(coord.name() for coord in coords)) + emsg = ( + f"Expected to find exactly 1 coordinate, but found {len(coords)}. " + f"They were: {', '.join(coord.name() for coord in coords)}." ) - raise iris.exceptions.CoordinateNotFoundError(msg) + raise iris.exceptions.CoordinateNotFoundError(emsg) elif len(coords) == 0: _name = name_or_coord if name_or_coord is not None: if not isinstance(name_or_coord, str): _name = name_or_coord.name() bad_name = _name or standard_name or long_name or "" - msg = ( - "Expected to find exactly 1 %s coordinate, but found " - "none." % bad_name + emsg = ( + f"Expected to find exactly 1 {bad_name!r} coordinate, " + "but found none." ) - raise iris.exceptions.CoordinateNotFoundError(msg) + raise iris.exceptions.CoordinateNotFoundError(emsg) return coords[0] @@ -1894,6 +1956,76 @@ def coord_system(self, spec=None): return result + def _any_meshcoord(self): + """Return a MeshCoord if there are any, else None.""" + mesh_coords = self.coords(mesh_coords=True) + if mesh_coords: + result = mesh_coords[0] + else: + result = None + return result + + @property + def mesh(self): + """ + Return the unstructured :class:`~iris.experimental.ugrid.Mesh` + associated with the cube, if the cube has any + :class:`~iris.experimental.ugrid.MeshCoord`\\ s, + or ``None`` if it has none. + + Returns: + + * mesh (:class:`iris.experimental.ugrid.Mesh` or None): + The mesh of the cube + :class:`~iris.experimental.ugrid.MeshCoord`\\s, + or ``None``. 
+ + """ + result = self._any_meshcoord() + if result is not None: + result = result.mesh + return result + + @property + def location(self): + """ + Return the mesh "location" of the cube data, if the cube has any + :class:`~iris.experimental.ugrid.MeshCoord`\\ s, + or ``None`` if it has none. + + Returns: + + * location (str or None): + The mesh location of the cube + :class:`~iris.experimental.ugrid.MeshCoord`\\s + (i.e. one of 'face' / 'edge' / 'node'), + or ``None``. + + """ + result = self._any_meshcoord() + if result is not None: + result = result.location + return result + + def mesh_dim(self): + """ + Return the cube dimension of the mesh, if the cube has any + :class:`~iris.experimental.ugrid.MeshCoord`\\ s, + or ``None`` if it has none. + + Returns: + + * mesh_dim (int, or None): + the cube dimension which the cube + :class:`~iris.experimental.ugrid.MeshCoord`\\s map to, + or ``None``. + + """ + result = self._any_meshcoord() + if result is not None: + (result,) = self.coord_dims(result) # result is a 1-tuple + return result + def cell_measures(self, name_or_cell_measure=None): """ Return a list of cell measures in this cube fitting the given criteria. @@ -2140,7 +2272,7 @@ def data(self): .. note:: - Cubes obtained from netCDF, PP, and FieldsFile files will only + Cubes obtained from NetCDF, PP, and FieldsFile files will only populate this attribute on its first use. 
To obtain the shape of the data without causing it to be loaded, diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 1c05d13163..12d24ef70f 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -39,6 +39,12 @@ class AncillaryVariableNotFoundError(KeyError): pass +class ConnectivityNotFoundError(KeyError): + """Raised when a search yields no connectivities.""" + + pass + + class CoordinateMultiDimError(ValueError): """Raised when a routine doesn't support multi-dimensional coordinates.""" diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py deleted file mode 100644 index 0ff95a4fdf..0000000000 --- a/lib/iris/experimental/ugrid.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -""" -Ugrid functions. - -""" - -import iris - - -def ugrid(location, name): - """ - Create a cube from an unstructured grid. - - Args: - - * location: - A string whose value represents the path to a file or - URL to an OpenDAP resource conforming to the - Unstructured Grid Metadata Conventions for Scientific Datasets - https://github.com/ugrid-conventions/ugrid-conventions - - * name: - A string whose value represents a cube loading constraint of - first the standard name if found, then the long name if found, - then the variable name if found, before falling back to - the value of the default which itself defaults to "unknown" - - Returns: - An instance of :class:`iris.cube.Cube` decorated with - an instance of :class:`pyugrid.ugrid.Ugrid` - bound to an attribute of the cube called "mesh" - - """ - # Lazy import so we can build the docs with no pyugrid. 
- import pyugrid - - cube = iris.load_cube(location, name) - ug = pyugrid.ugrid.UGrid.from_ncfile(location) - cube.mesh = ug - cube.mesh_dimension = 1 # {0:time, 1:node} - return cube diff --git a/lib/iris/experimental/ugrid/__init__.py b/lib/iris/experimental/ugrid/__init__.py new file mode 100644 index 0000000000..144c3e4da6 --- /dev/null +++ b/lib/iris/experimental/ugrid/__init__.py @@ -0,0 +1,3822 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +""" +Infra-structure for unstructured mesh support, based on +CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/ + +""" + +from abc import ABC, abstractmethod +from collections import namedtuple +from collections.abc import Iterable +from contextlib import contextmanager +from functools import wraps +import logging +import re +import threading + +import dask.array as da +import numpy as np + +from ... 
import _lazy_data as _lazy +from ...common.lenient import _lenient_service as lenient_service +from ...common.metadata import ( + SERVICES, + SERVICES_COMBINE, + SERVICES_DIFFERENCE, + SERVICES_EQUAL, + BaseMetadata, + metadata_filter, + metadata_manager_factory, +) +from ...common.mixin import CFVariableMixin +from ...config import get_logger +from ...coords import AuxCoord, _DimensionalMetadata +from ...exceptions import ConnectivityNotFoundError, CoordinateNotFoundError +from ...fileformats import cf, netcdf +from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names +from ...util import guess_coord_axis + +__all__ = [ + "CFUGridReader", + "Connectivity", + "ConnectivityMetadata", + "Mesh", + "Mesh1DConnectivities", + "Mesh1DCoords", + "Mesh1DNames", + "Mesh2DConnectivities", + "Mesh2DCoords", + "Mesh2DNames", + "MeshEdgeCoords", + "MeshFaceCoords", + "MeshNodeCoords", + "MeshMetadata", + "MeshCoord", + "MeshCoordMetadata", + "ParseUGridOnLoad", + "PARSE_UGRID_ON_LOAD", +] + + +#: Numpy "threshold" printoptions default argument. +NP_PRINTOPTIONS_THRESHOLD = 10 +#: Numpy "edgeitems" printoptions default argument. +NP_PRINTOPTIONS_EDGEITEMS = 2 + + +# Configure the logger. +logger = get_logger(__name__, fmt="[%(cls)s.%(funcName)s]") + +# +# Mesh dimension names namedtuples. +# + +#: Namedtuple for 1D mesh topology NetCDF variable dimension names. +Mesh1DNames = namedtuple("Mesh1DNames", ["node_dimension", "edge_dimension"]) +#: Namedtuple for 2D mesh topology NetCDF variable dimension names. +Mesh2DNames = namedtuple( + "Mesh2DNames", ["node_dimension", "edge_dimension", "face_dimension"] +) + +# +# Mesh coordinate manager namedtuples. +# + +#: Namedtuple for 1D mesh :class:`~iris.coords.AuxCoord` coordinates. +Mesh1DCoords = namedtuple( + "Mesh1DCoords", ["node_x", "node_y", "edge_x", "edge_y"] +) +#: Namedtuple for 2D mesh :class:`~iris.coords.AuxCoord` coordinates. 
+Mesh2DCoords = namedtuple( + "Mesh2DCoords", + ["node_x", "node_y", "edge_x", "edge_y", "face_x", "face_y"], +) +#: Namedtuple for ``node`` :class:`~iris.coords.AuxCoord` coordinates. +MeshNodeCoords = namedtuple("MeshNodeCoords", ["node_x", "node_y"]) +#: Namedtuple for ``edge`` :class:`~iris.coords.AuxCoord` coordinates. +MeshEdgeCoords = namedtuple("MeshEdgeCoords", ["edge_x", "edge_y"]) +#: Namedtuple for ``face`` :class:`~iris.coords.AuxCoord` coordinates. +MeshFaceCoords = namedtuple("MeshFaceCoords", ["face_x", "face_y"]) + +# +# Mesh connectivity manager namedtuples. +# + +#: Namedtuple for 1D mesh :class:`Connectivity` instances. +Mesh1DConnectivities = namedtuple("Mesh1DConnectivities", ["edge_node"]) +#: Namedtuple for 2D mesh :class:`Connectivity` instances. +Mesh2DConnectivities = namedtuple( + "Mesh2DConnectivities", + [ + "face_node", + "edge_node", + "face_edge", + "face_face", + "edge_face", + "boundary_node", + ], +) + + +class Connectivity(_DimensionalMetadata): + """ + A CF-UGRID topology connectivity, describing the topological relationship + between two lists of dimensional locations. One or more connectivities + make up a CF-UGRID topology - a constituent of a CF-UGRID mesh. + + See: https://ugrid-conventions.github.io/ugrid-conventions + + """ + + UGRID_CF_ROLES = [ + "edge_node_connectivity", + "face_node_connectivity", + "face_edge_connectivity", + "face_face_connectivity", + "edge_face_connectivity", + "boundary_node_connectivity", + "volume_node_connectivity", + "volume_edge_connectivity", + "volume_face_connectivity", + "volume_volume_connectivity", + ] + + def __init__( + self, + indices, + cf_role, + standard_name=None, + long_name=None, + var_name=None, + units=None, + attributes=None, + start_index=0, + src_dim=0, + ): + """ + Constructs a single connectivity. + + Args: + + * indices (numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array): + The index values describing a topological relationship. 
Constructed + of 2 dimensions - the list of locations, and within each location: + the indices of the 'target locations' it relates to. + Use a :class:`numpy.ma.core.MaskedArray` if :attr:`src_location` + lengths vary - mask unused index 'slots' within each + :attr:`src_location`. Use a :class:`dask.array.Array` to keep + indices 'lazy'. + * cf_role (str): + Denotes the topological relationship that this connectivity + describes. Made up of this array's locations, and the indexed + 'target location' within each location. + See :attr:`UGRID_CF_ROLES` for valid arguments. + + Kwargs: + + * standard_name (str): + CF standard name of the connectivity. + (NOTE: this is not expected by the UGRID conventions, but will be + handled in Iris' standard way if provided). + * long_name (str): + Descriptive name of the connectivity. + * var_name (str): + The NetCDF variable name for the connectivity. + * units (cf_units.Unit): + The :class:`~cf_units.Unit` of the connectivity's values. + Can be a string, which will be converted to a Unit object. + (NOTE: this is not expected by the UGRID conventions, but will be + handled in Iris' standard way if provided). + * attributes (dict): + A dictionary containing other cf and user-defined attributes. + * start_index (int): + Either ``0`` or ``1``. Default is ``0``. Denotes whether + :attr:`indices` uses 0-based or 1-based indexing (allows support + for Fortran and legacy NetCDF files). + * src_dim (int): + Either ``0`` or ``1``. Default is ``0``. Denotes which dimension + of :attr:`indices` varies over the :attr:`src_location`'s (the + alternate dimension therefore varying within individual + :attr:`src_location`'s). (This parameter allows support for fastest varying index being + either first or last). + E.g. for ``face_node_connectivity``, for 10 faces: + ``indices.shape[src_dim] = 10``. + + """ + + def validate_arg_vs_list(arg_name, arg, valid_list): + if arg not in valid_list: + error_msg = ( + f"Invalid {arg_name} . Got: {arg} . 
Must be one of: " + f"{valid_list} ." + ) + raise ValueError(error_msg) + + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(ConnectivityMetadata) + + validate_arg_vs_list("start_index", start_index, [0, 1]) + # indices array will be 2-dimensional, so must be either 0 or 1. + validate_arg_vs_list("src_dim", src_dim, [0, 1]) + validate_arg_vs_list("cf_role", cf_role, Connectivity.UGRID_CF_ROLES) + + self._metadata_manager.start_index = start_index + self._metadata_manager.src_dim = src_dim + self._metadata_manager.cf_role = cf_role + + self._tgt_dim = 1 - src_dim + self._src_location, self._tgt_location = cf_role.split("_")[:2] + + super().__init__( + values=indices, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=units, + attributes=attributes, + ) + + def __repr__(self): + def kwargs_filter(k, v): + result = False + if k != "cf_role": + if v is not None: + result = True + if ( + not isinstance(v, str) + and isinstance(v, Iterable) + and not v + ): + result = False + elif k == "units" and v == "unknown": + result = False + return result + + def array2repr(array): + if self.has_lazy_indices(): + result = repr(array) + else: + with np.printoptions( + threshold=NP_PRINTOPTIONS_THRESHOLD, + edgeitems=NP_PRINTOPTIONS_EDGEITEMS, + ): + result = re.sub("\n *", " ", repr(array)) + return result + + # positional arguments + args = ", ".join( + [ + f"{array2repr(self.core_indices())}", + f"cf_role={self.cf_role!r}", + ] + ) + + # optional arguments (metadata) + kwargs = ", ".join( + [ + f"{k}={v!r}" + for k, v in self.metadata._asdict().items() + if kwargs_filter(k, v) + ] + ) + + return f"{self.__class__.__name__}({', '.join([args, kwargs])})" + + def __str__(self): + args = ", ".join( + [f"cf_role={self.cf_role!r}", f"start_index={self.start_index!r}"] + ) + return f"{self.__class__.__name__}({args})" + + @property + def _values(self): + # Overridden just to allow .setter override. 
+ return super()._values + + @_values.setter + def _values(self, values): + self._validate_indices(values, shapes_only=True) + # The recommended way of using the setter in super(). + super(Connectivity, self.__class__)._values.fset(self, values) + + @property + def cf_role(self): + """ + The category of topological relationship that this connectivity + describes. + **Read-only** - validity of :attr:`indices` is dependent on + :attr:`cf_role`. A new :class:`Connectivity` must therefore be defined + if a different :attr:`cf_role` is needed. + + """ + return self._metadata_manager.cf_role + + @property + def src_location(self): + """ + Derived from the connectivity's :attr:`cf_role` - the first part, e.g. + ``face`` in ``face_node_connectivity``. Refers to the locations + listed by the :attr:`src_dim` of the connectivity's :attr:`indices` + array. + + """ + return self._src_location + + @property + def tgt_location(self): + """ + Derived from the connectivity's :attr:`cf_role` - the second part, e.g. + ``node`` in ``face_node_connectivity``. Refers to the locations indexed + by the values in the connectivity's :attr:`indices` array. + + """ + return self._tgt_location + + @property + def start_index(self): + """ + The base value of the connectivity's :attr:`indices` array; either + ``0`` or ``1``. + **Read-only** - validity of :attr:`indices` is dependent on + :attr:`start_index`. A new :class:`Connectivity` must therefore be + defined if a different :attr:`start_index` is needed. + + """ + return self._metadata_manager.start_index + + @property + def src_dim(self): + """ + The dimension of the connectivity's :attr:`indices` array that varies + over the connectivity's :attr:`src_location`'s. Either ``0`` or ``1``. + **Read-only** - validity of :attr:`indices` is dependent on + :attr:`src_dim`. Use :meth:`transpose` to create a new, transposed + :class:`Connectivity` if a different :attr:`src_dim` is needed. 
+ + """ + return self._metadata_manager.src_dim + + @property + def tgt_dim(self): + """ + Derived as the alternate value of :attr:`src_dim` - each must equal + either ``0`` or ``1``. + The dimension of the connectivity's :attr:`indices` array that varies + within the connectivity's individual :attr:`src_location`'s. + + """ + return self._tgt_dim + + @property + def indices(self): + """ + The index values describing the topological relationship of the + connectivity, as a NumPy array. Masked points indicate a + :attr:`src_location` shorter than the longest :attr:`src_location` + described in this array - unused index 'slots' are masked. + **Read-only** - index values are only meaningful when combined with + an appropriate :attr:`cf_role`, :attr:`start_index` and + :attr:`src_dim`. A new :class:`Connectivity` must therefore be + defined if different indices are needed. + + """ + return self._values + + def indices_by_src(self, indices=None): + """ + Return a view of the indices array with :attr:`src_dim` **always** as + the first index - transposed if necessary. Can optionally pass in an + identically shaped array on which to perform this operation (e.g. the + output from :meth:`core_indices` or :meth:`lazy_indices`). + + Kwargs: + + * indices (array): + The array on which to operate. If ``None``, will operate on + :attr:`indices`. Default is ``None``. + + Returns: + A view of the indices array, transposed - if necessary - to put + :attr:`src_dim` first. + + """ + if indices is None: + indices = self.indices + + if indices.shape != self.shape: + raise ValueError( + f"Invalid indices provided. Must be shape={self.shape} , " + f"got shape={indices.shape} ." 
+ ) + + if self.src_dim == 0: + result = indices + elif self.src_dim == 1: + result = indices.transpose() + else: + raise ValueError("Invalid src_dim.") + + return result + + def _validate_indices(self, indices, shapes_only=False): + # Use shapes_only=True for a lower resource, less thorough validation + # of indices by just inspecting the array shape instead of inspecting + # individual masks. So will not catch individual src_locations being + # unacceptably small. + + def indices_error(message): + raise ValueError("Invalid indices provided. " + message) + + indices = self._sanitise_array(indices, 0) + + indices_dtype = indices.dtype + if not np.issubdtype(indices_dtype, np.integer): + indices_error( + f"dtype must be numpy integer subtype, got: {indices_dtype} ." + ) + + indices_min = indices.min() + if _lazy.is_lazy_data(indices_min): + indices_min = indices_min.compute() + if indices_min < self.start_index: + indices_error( + f"Lowest index: {indices_min} < start_index: {self.start_index} ." + ) + + indices_shape = indices.shape + if len(indices_shape) != 2: + indices_error( + f"Expected 2-dimensional shape, got: shape={indices_shape} ." + ) + + len_req_fail = False + if shapes_only: + src_shape = indices_shape[self.tgt_dim] + # Wrap as lazy to allow use of the same operations below + # regardless of shapes_only. + src_lengths = _lazy.as_lazy_data(np.asarray(src_shape)) + else: + # Wouldn't be safe to use during __init__ validation, since + # lazy_src_lengths requires self.indices to exist. Safe here since + # shapes_only==False is only called manually, i.e. after + # initialisation. 
+ src_lengths = self.lazy_src_lengths() + if self.src_location in ("edge", "boundary"): + if (src_lengths != 2).any().compute(): + len_req_fail = "len=2" + else: + if self.src_location == "face": + min_size = 3 + elif self.src_location == "volume": + if self.tgt_location == "edge": + min_size = 6 + else: + min_size = 4 + else: + raise NotImplementedError + if (src_lengths < min_size).any().compute(): + len_req_fail = f"len>={min_size}" + if len_req_fail: + indices_error( + f"Not all src_locations meet requirement: {len_req_fail} - " + f"needed to describe '{self.cf_role}' ." + ) + + def validate_indices(self): + """ + Perform a thorough validity check of this connectivity's + :attr:`indices`. Includes checking the sizes of individual + :attr:`src_location`'s (specified using masks on the + :attr:`indices` array) against the :attr:`cf_role`. + + Raises a ``ValueError`` if any problems are encountered, otherwise + passes silently. + + .. note:: + + While this uses lazy computation, it will still be a high + resource demand for a large :attr:`indices` array. + + """ + self._validate_indices(self.indices, shapes_only=False) + + def __eq__(self, other): + eq = NotImplemented + if isinstance(other, Connectivity): + # Account for the fact that other could be the transposed equivalent + # of self, which we consider 'safe' since the recommended + # interaction with the indices array is via indices_by_src, which + # corrects for this difference. (To enable this, src_dim does + # not participate in ConnectivityMetadata to ConnectivityMetadata + # equivalence). + if hasattr(other, "metadata"): + # metadata comparison + eq = self.metadata == other.metadata + if eq: + eq = ( + self.indices_by_src() == other.indices_by_src() + ).all() + return eq + + def transpose(self): + """ + Create a new :class:`Connectivity`, identical to this one but with the + :attr:`indices` array transposed and the :attr:`src_dim` value flipped. 
+ + Returns: + A new :class:`Connectivity` that is the transposed equivalent of + the original. + + """ + new_connectivity = Connectivity( + indices=self.indices.transpose().copy(), + cf_role=self.cf_role, + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + start_index=self.start_index, + src_dim=self.tgt_dim, + ) + return new_connectivity + + def lazy_indices(self): + """ + Return a lazy array representing the connectivity's indices. + + Accessing this method will never cause the :attr:`indices` values to be + loaded. Similarly, calling methods on, or indexing, the returned Array + will not cause the connectivity to have loaded :attr:`indices`. + + If the :attr:`indices` have already been loaded for the connectivity, + the returned Array will be a new lazy array wrapper. + + Returns: + A lazy array, representing the connectivity indices array. + + """ + return super()._lazy_values() + + def core_indices(self): + """ + The indices array at the core of this connectivity, which may be a + NumPy array or a Dask array. + + Returns: + numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array + + """ + return super()._core_values() + + def has_lazy_indices(self): + """ + Return a boolean indicating whether the connectivity's :attr:`indices` + array is a lazy Dask array or not. + + Returns: + boolean + + """ + return super()._has_lazy_values() + + def lazy_src_lengths(self): + """ + Return a lazy array representing the lengths of each + :attr:`src_location` in the :attr:`src_dim` of the connectivity's + :attr:`indices` array, accounting for masks if present. + + Accessing this method will never cause the :attr:`indices` values to be + loaded. Similarly, calling methods on, or indexing, the returned Array + will not cause the connectivity to have loaded :attr:`indices`. + + The returned Array will be lazy regardless of whether the + :attr:`indices` have already been loaded. 
+ + Returns: + A lazy array, representing the lengths of each :attr:`src_location`. + + """ + src_mask_counts = da.sum( + da.ma.getmaskarray(self.indices), axis=self.tgt_dim + ) + max_src_size = self.indices.shape[self.tgt_dim] + return max_src_size - src_mask_counts + + def src_lengths(self): + """ + Return a NumPy array representing the lengths of each + :attr:`src_location` in the :attr:`src_dim` of the connectivity's + :attr:`indices` array, accounting for masks if present. + + Returns: + A NumPy array, representing the lengths of each :attr:`src_location`. + + """ + return self.lazy_src_lengths().compute() + + def cube_dims(self, cube): + """Not available on :class:`Connectivity`.""" + raise NotImplementedError + + def xml_element(self, doc): + # Create the XML element as the camelCaseEquivalent of the + # class name + element = super().xml_element(doc) + + element.setAttribute("cf_role", self.cf_role) + element.setAttribute("start_index", self.start_index) + element.setAttribute("src_dim", self.src_dim) + + return element + + +class ConnectivityMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.experimental.ugrid.Connectivity`. + + """ + + # The "src_dim" member is stateful only, and does not participate in + # lenient/strict equivalence. + _members = ("cf_role", "start_index", "src_dim") + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for connectivities. + + Args: + + * other (ConnectivityMetadata): + The other connectivity metadata participating in the lenient + combination. + + Returns: + A list of combined metadata member values. + + """ + # Perform "strict" combination for "cf_role", "start_index", "src_dim". 
+ def func(field): + left = getattr(self, field) + right = getattr(other, field) + return left if left == right else None + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in ConnectivityMetadata._members] + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.extend(values) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for connectivities. + + Args: + + * other (ConnectivityMetadata): + The other connectivity metadata participating in the lenient + comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for "cf_role", "start_index". + # The "src_dim" member is not part of lenient equivalence. + members = filter( + lambda member: member != "src_dim", ConnectivityMetadata._members + ) + result = all( + [ + getattr(self, field) == getattr(other, field) + for field in members + ] + ) + if result: + # Perform lenient comparison of the other parent members. + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for connectivities. + + Args: + + * other (ConnectivityMetadata): + The other connectivity metadata participating in the lenient + difference. + + Returns: + A list of difference metadata member values. + + """ + # Perform "strict" difference for "cf_role", "start_index", "src_dim". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return None if left == right else (left, right) + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in ConnectivityMetadata._members] + # Perform lenient difference of the other parent members. 
+ result = super()._difference_lenient(other) + result.extend(values) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + +class MeshMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.experimental.ugrid.Mesh`. + + """ + + # The node_dimension", "edge_dimension" and "face_dimension" members are + # stateful only; they not participate in lenient/strict equivalence. + _members = ( + "topology_dimension", + "node_dimension", + "edge_dimension", + "face_dimension", + ) + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for meshes. + + Args: + + * other (MeshMetadata): + The other mesh metadata participating in the lenient + combination. + + Returns: + A list of combined metadata member values. + + """ + + # Perform "strict" combination for "topology_dimension", + # "node_dimension", "edge_dimension" and "face_dimension". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return left if left == right else None + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in MeshMetadata._members] + # Perform lenient combination of the other parent members. 
+ result = super()._combine_lenient(other) + result.extend(values) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for meshes. + + Args: + + * other (MeshMetadata): + The other mesh metadata participating in the lenient + comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for "topology_dimension". + # "node_dimension", "edge_dimension" and "face_dimension" are not part + # of lenient equivalence at all. + result = self.topology_dimension == other.topology_dimension + if result: + # Perform lenient comparison of the other parent members. + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for meshes. + + Args: + + * other (MeshMetadata): + The other mesh metadata participating in the lenient + difference. + + Returns: + A list of difference metadata member values. + + """ + # Perform "strict" difference for "topology_dimension", + # "node_dimension", "edge_dimension" and "face_dimension". + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return None if left == right else (left, right) + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in MeshMetadata._members] + # Perform lenient difference of the other parent members. 
+ result = super()._difference_lenient(other) + result.extend(values) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + +class Mesh(CFVariableMixin): + """ + A container representing the UGRID ``cf_role`` ``mesh_topology``, supporting + 1D network, 2D triangular, and 2D flexible mesh topologies. + + .. note:: + + The 3D layered and fully 3D unstructured mesh topologies are not supported + at this time. + + .. seealso:: + + The UGRID Conventions, https://ugrid-conventions.github.io/ugrid-conventions/ + + """ + + # TBD: for volume and/or z-axis support include axis "z" and/or dimension "3" + #: The supported mesh axes. + AXES = ("x", "y") + #: Valid range of values for ``topology_dimension``. + TOPOLOGY_DIMENSIONS = (1, 2) + #: Valid mesh locations. + LOCATIONS = ("edge", "node", "face") + + def __init__( + self, + topology_dimension, + node_coords_and_axes, + connectivities, + edge_coords_and_axes=None, + face_coords_and_axes=None, + standard_name=None, + long_name=None, + var_name=None, + units=None, + attributes=None, + node_dimension=None, + edge_dimension=None, + face_dimension=None, + ): + """ + .. note:: + + The purpose of the :attr:`node_dimension`, :attr:`edge_dimension` and + :attr:`face_dimension` properties are to preserve the original NetCDF + variable dimension names. Note that, only :attr:`edge_dimension` and + :attr:`face_dimension` are UGRID attributes, and are only present for + :attr:`topology_dimension` ``>=2``. + + """ + # TODO: support volumes. 
+ # TODO: support (coord, "z") + + self._metadata_manager = metadata_manager_factory(MeshMetadata) + + # topology_dimension is read-only, so assign directly to the metadata manager + if topology_dimension not in self.TOPOLOGY_DIMENSIONS: + emsg = f"Expected 'topology_dimension' in range {self.TOPOLOGY_DIMENSIONS!r}, got {topology_dimension!r}." + raise ValueError(emsg) + self._metadata_manager.topology_dimension = topology_dimension + + self.node_dimension = node_dimension + self.edge_dimension = edge_dimension + self.face_dimension = face_dimension + + # assign the metadata to the metadata manager + self.standard_name = standard_name + self.long_name = long_name + self.var_name = var_name + self.units = units + self.attributes = attributes + + # based on the topology_dimension, create the appropriate coordinate manager + def normalise(location, axis): + result = str(axis).lower() + if result not in self.AXES: + emsg = f"Invalid axis specified for {location} coordinate {coord.name()!r}, got {axis!r}." + raise ValueError(emsg) + return f"{location}_{result}" + + if not isinstance(node_coords_and_axes, Iterable): + node_coords_and_axes = [node_coords_and_axes] + + if not isinstance(connectivities, Iterable): + connectivities = [connectivities] + + kwargs = {} + for coord, axis in node_coords_and_axes: + kwargs[normalise("node", axis)] = coord + if edge_coords_and_axes is not None: + for coord, axis in edge_coords_and_axes: + kwargs[normalise("edge", axis)] = coord + if face_coords_and_axes is not None: + for coord, axis in face_coords_and_axes: + kwargs[normalise("face", axis)] = coord + + # check the UGRID minimum requirement for coordinates + if "node_x" not in kwargs: + emsg = ( + "Require a node coordinate that is x-axis like to be provided." + ) + raise ValueError(emsg) + if "node_y" not in kwargs: + emsg = ( + "Require a node coordinate that is y-axis like to be provided." 
+ ) + raise ValueError(emsg) + + if self.topology_dimension == 1: + self._coord_manager = _Mesh1DCoordinateManager(**kwargs) + self._connectivity_manager = _Mesh1DConnectivityManager( + *connectivities + ) + elif self.topology_dimension == 2: + self._coord_manager = _Mesh2DCoordinateManager(**kwargs) + self._connectivity_manager = _Mesh2DConnectivityManager( + *connectivities + ) + else: + emsg = f"Unsupported 'topology_dimension', got {topology_dimension!r}." + raise NotImplementedError(emsg) + + def __eq__(self, other): + # TBD: this is a minimalist implementation and requires to be revisited + return id(self) == id(other) + + def __getstate__(self): + return ( + self._metadata_manager, + self._coord_manager, + self._connectivity_manager, + ) + + def __ne__(self, other): + result = self.__eq__(other) + if result is not NotImplemented: + result = not result + return result + + def __repr__(self): + def to_coord_and_axis(members): + def axis(member): + return member.split("_")[1] + + result = [ + f"({coord!s}, {axis(member)!r})" + for member, coord in members._asdict().items() + if coord is not None + ] + result = f"[{', '.join(result)}]" if result else None + return result + + node_coords_and_axes = to_coord_and_axis(self.node_coords) + connectivities = [ + str(connectivity) + for connectivity in self.all_connectivities + if connectivity is not None + ] + + if len(connectivities) == 1: + connectivities = connectivities[0] + else: + connectivities = f"[{', '.join(connectivities)}]" + + # positional arguments + args = [ + f"topology_dimension={self.topology_dimension!r}", + f"node_coords_and_axes={node_coords_and_axes}", + f"connectivities={connectivities}", + ] + + # optional argument + edge_coords_and_axes = to_coord_and_axis(self.edge_coords) + if edge_coords_and_axes: + args.append(f"edge_coords_and_axes={edge_coords_and_axes}") + + # optional argument + if self.topology_dimension > 1: + face_coords_and_axes = to_coord_and_axis(self.face_coords) + if 
face_coords_and_axes: + args.append(f"face_coords_and_axes={face_coords_and_axes}") + + def kwargs_filter(k, v): + result = False + if k != "topology_dimension": + if not ( + self.topology_dimension == 1 and k == "face_dimension" + ): + if v is not None: + result = True + if ( + not isinstance(v, str) + and isinstance(v, Iterable) + and not v + ): + result = False + elif k == "units" and v == "unknown": + result = False + return result + + # optional arguments (metadata) + args.extend( + [ + f"{k}={v!r}" + for k, v in self.metadata._asdict().items() + if kwargs_filter(k, v) + ] + ) + + return f"{self.__class__.__name__}({', '.join(args)})" + + def __setstate__(self, state): + metadata_manager, coord_manager, connectivity_manager = state + self._metadata_manager = metadata_manager + self._coord_manager = coord_manager + self._connectivity_manager = connectivity_manager + + def _set_dimension_names(self, node, edge, face, reset=False): + args = (node, edge, face) + currents = ( + self.node_dimension, + self.edge_dimension, + self.face_dimension, + ) + zipped = zip(args, currents) + if reset: + node, edge, face = [ + None if arg else current for arg, current in zipped + ] + else: + node, edge, face = [arg or current for arg, current in zipped] + + self.node_dimension = node + self.edge_dimension = edge + self.face_dimension = face + + if self.topology_dimension == 1: + result = Mesh1DNames(self.node_dimension, self.edge_dimension) + elif self.topology_dimension == 2: + result = Mesh2DNames( + self.node_dimension, self.edge_dimension, self.face_dimension + ) + else: + message = ( + f"Unsupported topology_dimension: {self.topology_dimension} ." + ) + raise NotImplementedError(message) + + return result + + @property + def all_connectivities(self): + """ + All the :class:`Connectivity` instances of the :class:`Mesh`. 
+ + """ + return self._connectivity_manager.all_members + + @property + def all_coords(self): + """ + All the :class:`~iris.coords.AuxCoord` coordinates of the :class:`Mesh`. + + """ + return self._coord_manager.all_members + + @property + def boundary_node_connectivity(self): + """ + The *optional* UGRID ``boundary_node_connectivity`` :class:`Connectivity` + of the :class:`Mesh`. + + """ + return self._connectivity_manager.boundary_node + + @property + def edge_coords(self): + """ + The *optional* UGRID ``edge`` :class:`~iris.coords.AuxCoord` coordinates + of the :class:`Mesh`. + + """ + return self._coord_manager.edge_coords + + @property + def edge_dimension(self): + """ + The *optionally required* UGRID NetCDF variable name for the ``edge`` + dimension. + + """ + return self._metadata_manager.edge_dimension + + @edge_dimension.setter + def edge_dimension(self, name): + if not name or not isinstance(name, str): + edge_dimension = f"Mesh{self.topology_dimension}d_edge" + else: + edge_dimension = name + self._metadata_manager.edge_dimension = edge_dimension + + @property + def edge_face_connectivity(self): + """ + The *optional* UGRID ``edge_face_connectivity`` :class:`Connectivity` + of the :class:`Mesh`. + + """ + return self._connectivity_manager.edge_face + + @property + def edge_node_connectivity(self): + """ + The UGRID ``edge_node_connectivity`` :class:`Connectivity` of the + :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` + of ``1``, and *optionally required* for + :attr:`Mesh.topology_dimension` ``>=2``. + + """ + return self._connectivity_manager.edge_node + + @property + def face_coords(self): + """ + The *optional* UGRID ``face`` :class:`~iris.coords.AuxCoord` coordinates + of the :class:`Mesh`. + + """ + return self._coord_manager.face_coords + + @property + def face_dimension(self): + """ + The *optionally required* UGRID NetCDF variable name for the ``face`` + dimension. 
+ + """ + return self._metadata_manager.face_dimension + + @face_dimension.setter + def face_dimension(self, name): + if self.topology_dimension < 2: + face_dimension = None + if name: + # Tell the user it is not being set if they expected otherwise. + message = ( + "Not setting face_dimension (inappropriate for " + f"topology_dimension={self.topology_dimension}) ." + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + elif not name or not isinstance(name, str): + face_dimension = f"Mesh{self.topology_dimension}d_face" + else: + face_dimension = name + self._metadata_manager.face_dimension = face_dimension + + @property + def face_edge_connectivity(self): + """ + The *optional* UGRID ``face_edge_connectivity`` :class:`Connectivity` + of the :class:`Mesh`. + + """ + # optional + return self._connectivity_manager.face_edge + + @property + def face_face_connectivity(self): + """ + The *optional* UGRID ``face_face_connectivity`` :class:`Connectivity` + of the :class:`Mesh`. + + """ + return self._connectivity_manager.face_face + + @property + def face_node_connectivity(self): + """ + The UGRID ``face_node_connectivity`` :class:`Connectivity` of the + :class:`Mesh`, which is **required** for :attr:`Mesh.topology_dimension` + of ``2``, and *optionally required* for :attr:`Mesh.topology_dimension` + of ``3``. + + """ + return self._connectivity_manager.face_node + + @property + def node_coords(self): + """ + The **required** UGRID ``node`` :class:`~iris.coords.AuxCoord` coordinates + of the :class:`Mesh`. 
+ + """ + return self._coord_manager.node_coords + + @property + def node_dimension(self): + """The NetCDF variable name for the ``node`` dimension.""" + return self._metadata_manager.node_dimension + + @node_dimension.setter + def node_dimension(self, name): + if not name or not isinstance(name, str): + node_dimension = f"Mesh{self.topology_dimension}d_node" + else: + node_dimension = name + self._metadata_manager.node_dimension = node_dimension + + def add_connectivities(self, *connectivities): + """ + Add one or more :class:`Connectivity` instances to the :class:`Mesh`. + + Args: + + * connectivities (iterable of object): + A collection of one or more :class:`Connectivity` instances to + add to the :class:`Mesh`. + + """ + self._connectivity_manager.add(*connectivities) + + def add_coords( + self, + node_x=None, + node_y=None, + edge_x=None, + edge_y=None, + face_x=None, + face_y=None, + ): + """ + Add one or more :class:`~iris.coords.AuxCoord` coordinates to the :class:`Mesh`. + + Kwargs: + + * node_x (object): + The ``x-axis`` like ``node`` :class:`~iris.coords.AuxCoord`. + + * node_y (object): + The ``y-axis`` like ``node`` :class:`~iris.coords.AuxCoord`. + + * edge_x (object): + The ``x-axis`` like ``edge`` :class:`~iris.coords.AuxCoord`. + + * edge_y (object): + The ``y-axis`` like ``edge`` :class:`~iris.coords.AuxCoord`. + + * face_x (object): + The ``x-axis`` like ``face`` :class:`~iris.coords.AuxCoord`. + + * face_y (object): + The ``y-axis`` like ``face`` :class:`~iris.coords.AuxCoord`. + + """ + # Filter out absent arguments - only expecting face coords sometimes, + # same will be true of volumes in future. 
+ kwargs = { + "node_x": node_x, + "node_y": node_y, + "edge_x": edge_x, + "edge_y": edge_y, + "face_x": face_x, + "face_y": face_y, + } + kwargs = {k: v for k, v in kwargs.items() if v} + + self._coord_manager.add(**kwargs) + + def connectivities( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + contains_node=None, + contains_edge=None, + contains_face=None, + ): + """ + Return all :class:`Connectivity` instances from the :class:`Mesh` that + match the provided criteria. + + Criteria can be either specific properties or other objects with + metadata to be matched. + + .. seealso:: + + :meth:`Mesh.connectivity` for matching exactly one connectivity. + + Kwargs: + + * item (str or object): + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a connectivity or metadata instance equal to that of + the desired objects e.g., :class:`Connectivity` or + :class:`ConnectivityMetadata`. + + * standard_name (str): + The CF standard name of the desired :class:`Connectivity`. If + ``None``, does not check for ``standard_name``. + + * long_name (str): + An unconstrained description of the :class:`Connectivity`. If + ``None``, does not check for ``long_name``. + + * var_name (str): + The NetCDF variable name of the desired :class:`Connectivity`. If + ``None``, does not check for ``var_name``. + + * attributes (dict): + A dictionary of attributes desired on the :class:`Connectivity`. If + ``None``, does not check for ``attributes``. + + * cf_role (str): + The UGRID ``cf_role`` of the desired :class:`Connectivity`. + + * contains_node (bool): + Contains the ``node`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched. 
+ + * contains_edge (bool): + Contains the ``edge`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched. + + * contains_face (bool): + Contains the ``face`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched. + + Returns: + A list of :class:`Connectivity` instances from the :class:`Mesh` + that matched the given criteria. + + """ + result = self._connectivity_manager.filters( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, + ) + return list(result.values()) + + def connectivity( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + contains_node=None, + contains_edge=None, + contains_face=None, + ): + """ + Return a single :class:`Connectivity` from the :class:`Mesh` that + matches the provided criteria. + + Criteria can be either specific properties or other objects with + metadata to be matched. + + .. note:: + + If the given criteria do not return **precisely one** + :class:`Connectivity`, then a + :class:`~iris.exceptions.ConnectivityNotFoundError` is raised. + + .. seealso:: + + :meth:`Mesh.connectivities` for matching zero or more connectivities. + + Kwargs: + + * item (str or object): + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a connectivity or metadata instance equal to that of + the desired object e.g., :class:`Connectivity` or + :class:`ConnectivityMetadata`. + + * standard_name (str): + The CF standard name of the desired :class:`Connectivity`. If + ``None``, does not check for ``standard_name``. 
+ + * long_name (str): + An unconstrained description of the :class:`Connectivity`. If + ``None``, does not check for ``long_name``. + + * var_name (str): + The NetCDF variable name of the desired :class:`Connectivity`. If + ``None``, does not check for ``var_name``. + + * attributes (dict): + A dictionary of attributes desired on the :class:`Connectivity`. If + ``None``, does not check for ``attributes``. + + * cf_role (str): + The UGRID ``cf_role`` of the desired :class:`Connectivity`. + + * contains_node (bool): + Contains the ``node`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched. + + * contains_edge (bool): + Contains the ``edge`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched. + + * contains_face (bool): + Contains the ``face`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched. + + Returns: + The :class:`Connectivity` from the :class:`Mesh` that matched the + given criteria. + + """ + + result = self._connectivity_manager.filter( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, + ) + return list(result.values())[0] + + def coord( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + include_nodes=None, + include_edges=None, + include_faces=None, + ): + """ + Return a single :class:`~iris.coords.AuxCoord` coordinate from the + :class:`Mesh` that matches the provided criteria. + + Criteria can be either specific properties or other objects with + metadata to be matched. + + .. note:: + + If the given criteria do not return **precisely one** coordinate, + then a :class:`~iris.exceptions.CoordinateNotFoundError` is raised. + + .. 
seealso:: + + :meth:`Mesh.coords` for matching zero or more coordinates. + + Kwargs: + + * item (str or object): + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a coordinate or metadata instance equal to that of + the desired coordinate e.g., :class:`~iris.coords.AuxCoord` or + :class:`~iris.common.metadata.CoordMetadata`. + + * standard_name (str): + The CF standard name of the desired coordinate. If ``None``, does not + check for ``standard_name``. + + * long_name (str): + An unconstrained description of the coordinate. If ``None``, does not + check for ``long_name``. + + * var_name (str): + The NetCDF variable name of the desired coordinate. If ``None``, does + not check for ``var_name``. + + * attributes (dict): + A dictionary of attributes desired on the coordinates. If ``None``, + does not check for ``attributes``. + + * axis (str): + The desired coordinate axis, see :func:`~iris.util.guess_coord_axis`. + If ``None``, does not check for ``axis``. Accepts the values ``X``, + ``Y``, ``Z`` and ``T`` (case-insensitive). + + * include_nodes (bool): + Include all ``node`` coordinates in the list of objects to be matched. + + * include_edges (bool): + Include all ``edge`` coordinates in the list of objects to be matched. + + * include_faces (bool): + Include all ``face`` coordinates in the list of objects to be matched. + + Returns: + The :class:`~iris.coords.AuxCoord` coordinate from the :class:`Mesh` + that matched the given criteria. 
+ + """ + result = self._coord_manager.filter( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + include_nodes=include_nodes, + include_edges=include_edges, + include_faces=include_faces, + ) + return list(result.values())[0] + + def coords( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + include_nodes=None, + include_edges=None, + include_faces=None, + ): + """ + Return all :class:`~iris.coords.AuxCoord` coordinates from the :class:`Mesh` that + match the provided criteria. + + Criteria can be either specific properties or other objects with + metadata to be matched. + + .. seealso:: + + :meth:`Mesh.coord` for matching exactly one coordinate. + + Kwargs: + + * item (str or object): + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a coordinate or metadata instance equal to that of + the desired coordinates e.g., :class:`~iris.coords.AuxCoord` or + :class:`~iris.common.metadata.CoordMetadata`. + + * standard_name (str): + The CF standard name of the desired coordinate. If ``None``, does not + check for ``standard_name``. + + * long_name (str): + An unconstrained description of the coordinate. If ``None``, does not + check for ``long_name``. + + * var_name (str): + The NetCDF variable name of the desired coordinate. If ``None``, does + not check for ``var_name``. + + * attributes (dict): + A dictionary of attributes desired on the coordinates. If ``None``, + does not check for ``attributes``. + + * axis (str): + The desired coordinate axis, see :func:`~iris.util.guess_coord_axis`. + If ``None``, does not check for ``axis``. Accepts the values ``X``, + ``Y``, ``Z`` and ``T`` (case-insensitive). 
+ + * include_nodes (bool): + Include all ``node`` coordinates in the list of objects to be matched. + + * include_edges (bool): + Include all ``edge`` coordinates in the list of objects to be matched. + + * include_faces (bool): + Include all ``face`` coordinates in the list of objects to be matched. + + Returns: + A list of :class:`~iris.coords.AuxCoord` coordinates from the + :class:`Mesh` that matched the given criteria. + + """ + result = self._coord_manager.filters( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + include_nodes=include_nodes, + include_edges=include_edges, + include_faces=include_faces, + ) + return list(result.values()) + + def remove_connectivities( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + contains_node=None, + contains_edge=None, + contains_face=None, + ): + """ + Remove one or more :class:`Connectivity` from the :class:`Mesh` that + match the provided criteria. + + Criteria can be either specific properties or other objects with + metadata to be matched. + + Kwargs: + + * item (str or object): + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. + + * a connectivity or metadata instance equal to that of + the desired objects e.g., :class:`Connectivity` or + :class:`ConnectivityMetadata`. + + * standard_name (str): + The CF standard name of the desired :class:`Connectivity`. If + ``None``, does not check for ``standard_name``. + + * long_name (str): + An unconstrained description of the :class:`Connectivity`. If + ``None``, does not check for ``long_name``. + + * var_name (str): + The NetCDF variable name of the desired :class:`Connectivity`. 
If + ``None``, does not check for ``var_name``. + + * attributes (dict): + A dictionary of attributes desired on the :class:`Connectivity`. If + ``None``, does not check for ``attributes``. + + * cf_role (str): + The UGRID ``cf_role`` of the desired :class:`Connectivity`. + + * contains_node (bool): + Contains the ``node`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched + for potential removal. + + * contains_edge (bool): + Contains the ``edge`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched + for potential removal. + + * contains_face (bool): + Contains the ``face`` location as part of the + :attr:`ConnectivityMetadata.cf_role` in the list of objects to be matched + for potential removal. + + Returns: + A list of :class:`Connectivity` instances removed from the :class:`Mesh` + that matched the given criteria. + + """ + return self._connectivity_manager.remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, + ) + + def remove_coords( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + include_nodes=None, + include_edges=None, + include_faces=None, + ): + """ + Remove one or more :class:`~iris.coords.AuxCoord` from the :class:`Mesh` + that match the provided criteria. + + Criteria can be either specific properties or other objects with + metadata to be matched. + + Kwargs: + + * item (str or object): + Either, + + * a :attr:`~iris.common.mixin.CFVariableMixin.standard_name`, + :attr:`~iris.common.mixin.CFVariableMixin.long_name`, or + :attr:`~iris.common.mixin.CFVariableMixin.var_name` which is + compared against the :meth:`~iris.common.mixin.CFVariableMixin.name`. 
+ + * a coordinate or metadata instance equal to that of + the desired coordinates e.g., :class:`~iris.coords.AuxCoord` or + :class:`~iris.common.metadata.CoordMetadata`. + + * standard_name (str): + The CF standard name of the desired coordinate. If ``None``, does not + check for ``standard_name``. + + * long_name (str): + An unconstrained description of the coordinate. If ``None``, does not + check for ``long_name``. + + * var_name (str): + The NetCDF variable name of the desired coordinate. If ``None``, does + not check for ``var_name``. + + * attributes (dict): + A dictionary of attributes desired on the coordinates. If ``None``, + does not check for ``attributes``. + + * axis (str): + The desired coordinate axis, see :func:`~iris.util.guess_coord_axis`. + If ``None``, does not check for ``axis``. Accepts the values ``X``, + ``Y``, ``Z`` and ``T`` (case-insensitive). + + * include_nodes (bool): + Include all ``node`` coordinates in the list of objects to be matched + for potential removal. + + * include_edges (bool): + Include all ``edge`` coordinates in the list of objects to be matched + for potential removal. + + * include_faces (bool): + Include all ``face`` coordinates in the list of objects to be matched + for potential removal. + + Returns: + A list of :class:`~iris.coords.AuxCoord` coordinates removed from + the :class:`Mesh` that matched the given criteria. + + """ + # Filter out absent arguments - only expecting face coords sometimes, + # same will be true of volumes in future. + kwargs = { + "item": item, + "standard_name": standard_name, + "long_name": long_name, + "var_name": var_name, + "attributes": attributes, + "axis": axis, + "include_nodes": include_nodes, + "include_edges": include_edges, + "include_faces": include_faces, + } + kwargs = {k: v for k, v in kwargs.items() if v} + + return self._coord_manager.remove(**kwargs) + + def xml_element(self, doc): + """ + Create the :class:`xml.dom.minidom.Element` that describes this + :class:`Mesh`. 
+ + Args: + + * doc (object): + The parent :class:`xml.dom.minidom.Document`. + + Returns: + The :class:`xml.dom.minidom.Element` that will describe this + :class:`Mesh`, and the dictionary of attributes that require + to be added to this element. + + """ + pass + + # the MeshCoord will always have bounds, perhaps points. However the MeshCoord.guess_points() may + # be a very useful part of its behaviour. + # after using MeshCoord.guess_points(), the user may wish to add the associated MeshCoord.points into + # the Mesh as face_coordinates. + + # def to_AuxCoord(self, location, axis): + # # factory method + # # return the lazy AuxCoord(...) for the given location and axis + # + # def to_AuxCoords(self, location): + # # factory method + # # return the lazy AuxCoord(...), AuxCoord(...) + + def to_MeshCoord(self, location, axis): + """ + Generate a :class:`MeshCoord` that references the current + :class:`Mesh`, and passing through the ``location`` and ``axis`` + arguments. + + .. seealso:: + + :meth:`to_MeshCoords` for generating a series of mesh coords. + + Args: + + * location (str) + The ``location`` argument for :class:`MeshCoord` instantiation. + + * axis (str) + The ``axis`` argument for :class:`MeshCoord` instantiation. + + Returns: + A :class:`MeshCoord` referencing the current :class:`Mesh`. + + """ + return MeshCoord(mesh=self, location=location, axis=axis) + + def to_MeshCoords(self, location): + """ + Generate a tuple of :class:`MeshCoord`'s, each referencing the current + :class:`Mesh`, one for each :attr:`AXES` value, passing through the + ``location`` argument. + + .. seealso:: + + :meth:`to_MeshCoord` for generating a single mesh coord. + + Args: + + * location (str) + The ``location`` argument for :class:`MeshCoord` instantiation. + + Returns: + tuple of :class:`MeshCoord`'s referencing the current :class:`Mesh`. + One for each value in :attr:`AXES`, using the value for the + ``axis`` argument. 
+ + """ + # factory method + result = [ + self.to_MeshCoord(location=location, axis=ax) for ax in self.AXES + ] + return tuple(result) + + def dimension_names_reset(self, node=False, edge=False, face=False): + """ + Reset the name used for the NetCDF variable representing the ``node``, + ``edge`` and/or ``face`` dimension to ``None``. + + Kwargs: + + * node (bool): + Reset the name of the ``node`` dimension if ``True``. Default + is ``False``. + + * edge (bool): + Reset the name of the ``edge`` dimension if ``True``. Default + is ``False``. + + * face (bool): + Reset the name of the ``face`` dimension if ``True``. Default + is ``False``. + + """ + return self._set_dimension_names(node, edge, face, reset=True) + + def dimension_names(self, node=None, edge=None, face=None): + """ + Assign the name to be used for the NetCDF variable representing + the ``node``, ``edge`` and ``face`` dimension. + + The default value of ``None`` will not be assigned to clear the + associated ``node``, ``edge`` or ``face``. Instead use + :meth:`Mesh.dimension_names_reset`. + + Kwargs: + + * node (str): + The name to be used for the NetCDF variable representing the + ``node`` dimension. + + * edge (str): + The name to be used for the NetCDF variable representing the + ``edge`` dimension. + + * face (str): + The name to be used for the NetCDF variable representing the + ``face`` dimension. + + """ + return self._set_dimension_names(node, edge, face, reset=False) + + @property + def cf_role(self): + """The UGRID ``cf_role`` attribute of the :class:`Mesh`.""" + return "mesh_topology" + + @property + def topology_dimension(self): + """ + The UGRID ``topology_dimension`` attribute represents the highest + dimensionality of all the geometric elements (node, edge, face) represented + within the :class:`Mesh`. 
+ + """ + return self._metadata_manager.topology_dimension + + +class _Mesh1DCoordinateManager: + """ + + TBD: require clarity on coord_systems validation + TBD: require clarity on __eq__ support + TBD: rationalise self.coords() logic with other manager and Cube + + """ + + REQUIRED = ( + "node_x", + "node_y", + ) + OPTIONAL = ( + "edge_x", + "edge_y", + ) + + def __init__(self, node_x, node_y, edge_x=None, edge_y=None): + # initialise all the coordinates + self.ALL = self.REQUIRED + self.OPTIONAL + self._members = {member: None for member in self.ALL} + + # required coordinates + self.node_x = node_x + self.node_y = node_y + # optional coordinates + self.edge_x = edge_x + self.edge_y = edge_y + + def __eq__(self, other): + # TBD: this is a minimalist implementation and requires to be revisited + return id(self) == id(other) + + def __getstate__(self): + return self._members + + def __iter__(self): + for item in self._members.items(): + yield item + + def __ne__(self, other): + result = self.__eq__(other) + if result is not NotImplemented: + result = not result + return result + + def __repr__(self): + args = [ + f"{member}={coord!r}" + for member, coord in self + if coord is not None + ] + return f"{self.__class__.__name__}({', '.join(args)})" + + def __setstate__(self, state): + self._members = state + + def __str__(self): + args = [f"{member}" for member, coord in self if coord is not None] + return f"{self.__class__.__name__}({', '.join(args)})" + + def _remove(self, **kwargs): + result = {} + members = self.filters(**kwargs) + + for member in members.keys(): + if member in self.REQUIRED: + dmsg = f"Ignoring request to remove required coordinate {member!r}" + logger.debug(dmsg, extra=dict(cls=self.__class__.__name__)) + else: + result[member] = members[member] + setattr(self, member, None) + + return result + + def _setter(self, location, axis, coord, shape): + axis = axis.lower() + member = f"{location}_{axis}" + + # enforce the UGRID minimum coordinate 
requirement + if location == "node" and coord is None: + emsg = ( + f"{member!r} is a required coordinate, cannot set to 'None'." + ) + raise ValueError(emsg) + + if coord is not None: + if not isinstance(coord, AuxCoord): + emsg = f"{member!r} requires to be an 'AuxCoord', got {type(coord)}." + raise TypeError(emsg) + + guess_axis = guess_coord_axis(coord) + + if guess_axis and guess_axis.lower() != axis: + emsg = f"{member!r} requires a {axis}-axis like 'AuxCoord', got a {guess_axis.lower()}-axis like." + raise TypeError(emsg) + + if coord.climatological: + emsg = f"{member!r} cannot be a climatological 'AuxCoord'." + raise TypeError(emsg) + + if shape is not None and coord.shape != shape: + emsg = f"{member!r} requires to have shape {shape!r}, got {coord.shape!r}." + raise ValueError(emsg) + + self._members[member] = coord + + def _shape(self, location): + coord = getattr(self, f"{location}_x") + shape = coord.shape if coord is not None else None + if shape is None: + coord = getattr(self, f"{location}_y") + if coord is not None: + shape = coord.shape + return shape + + @property + def _edge_shape(self): + return self._shape(location="edge") + + @property + def _node_shape(self): + return self._shape(location="node") + + @property + def all_members(self): + return Mesh1DCoords(**self._members) + + @property + def edge_coords(self): + return MeshEdgeCoords(edge_x=self.edge_x, edge_y=self.edge_y) + + @property + def edge_x(self): + return self._members["edge_x"] + + @edge_x.setter + def edge_x(self, coord): + self._setter( + location="edge", axis="x", coord=coord, shape=self._edge_shape + ) + + @property + def edge_y(self): + return self._members["edge_y"] + + @edge_y.setter + def edge_y(self, coord): + self._setter( + location="edge", axis="y", coord=coord, shape=self._edge_shape + ) + + @property + def node_coords(self): + return MeshNodeCoords(node_x=self.node_x, node_y=self.node_y) + + @property + def node_x(self): + return self._members["node_x"] + + 
@node_x.setter + def node_x(self, coord): + self._setter( + location="node", axis="x", coord=coord, shape=self._node_shape + ) + + @property + def node_y(self): + return self._members["node_y"] + + @node_y.setter + def node_y(self, coord): + self._setter( + location="node", axis="y", coord=coord, shape=self._node_shape + ) + + def _add(self, coords): + member_x, member_y = coords._fields + + # deal with the special case where both members are changing + if coords[0] is not None and coords[1] is not None: + cache_x = self._members[member_x] + cache_y = self._members[member_y] + self._members[member_x] = None + self._members[member_y] = None + + try: + setattr(self, member_x, coords[0]) + setattr(self, member_y, coords[1]) + except (TypeError, ValueError): + # restore previous valid state + self._members[member_x] = cache_x + self._members[member_y] = cache_y + # now, re-raise the exception + raise + else: + # deal with the case where one or no member is changing + if coords[0] is not None: + setattr(self, member_x, coords[0]) + if coords[1] is not None: + setattr(self, member_y, coords[1]) + + def add(self, node_x=None, node_y=None, edge_x=None, edge_y=None): + """ + use self.remove(edge_x=True) to remove a coordinate e.g., using the + pattern self.add(edge_x=None) will not remove the edge_x coordinate + + """ + self._add(MeshNodeCoords(node_x, node_y)) + self._add(MeshEdgeCoords(edge_x, edge_y)) + + def filter(self, **kwargs): + # TODO: rationalise commonality with MeshConnectivityManager.filter and Cube.coord. + result = self.filters(**kwargs) + + if len(result) > 1: + names = ", ".join( + f"{member}={coord!r}" for member, coord in result.items() + ) + emsg = ( + f"Expected to find exactly 1 coordinate, but found {len(result)}. " + f"They were: {names}." 
+ ) + raise CoordinateNotFoundError(emsg) + + if len(result) == 0: + item = kwargs["item"] + if item is not None: + if not isinstance(item, str): + item = item.name() + name = ( + item + or kwargs["standard_name"] + or kwargs["long_name"] + or kwargs["var_name"] + or None + ) + name = "" if name is None else f"{name!r} " + emsg = ( + f"Expected to find exactly 1 {name}coordinate, but found none." + ) + raise CoordinateNotFoundError(emsg) + + return result + + def filters( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + include_nodes=None, + include_edges=None, + include_faces=None, + ): + # TBD: support coord_systems? + + # Preserve original argument before modifying. + face_requested = include_faces + + # Rationalise the tri-state behaviour. + args = [include_nodes, include_edges, include_faces] + state = not any(set(filter(lambda arg: arg is not None, args))) + include_nodes, include_edges, include_faces = map( + lambda arg: arg if arg is not None else state, args + ) + + def populated_coords(coords_tuple): + return list(filter(None, list(coords_tuple))) + + members = [] + if include_nodes: + members += populated_coords(self.node_coords) + if include_edges: + members += populated_coords(self.edge_coords) + if hasattr(self, "face_coords"): + if include_faces: + members += populated_coords(self.face_coords) + elif face_requested: + dmsg = "Ignoring request to filter non-existent 'face_coords'" + logger.debug(dmsg, extra=dict(cls=self.__class__.__name__)) + + result = metadata_filter( + members, + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + ) + + # Use the results to filter the _members dict for returning. 
+ result_ids = [id(r) for r in result] + result_dict = { + k: v for k, v in self._members.items() if id(v) in result_ids + } + return result_dict + + def remove( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + include_nodes=None, + include_edges=None, + ): + return self._remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + include_nodes=include_nodes, + include_edges=include_edges, + ) + + +class _Mesh2DCoordinateManager(_Mesh1DCoordinateManager): + OPTIONAL = ( + "edge_x", + "edge_y", + "face_x", + "face_y", + ) + + def __init__( + self, + node_x, + node_y, + edge_x=None, + edge_y=None, + face_x=None, + face_y=None, + ): + super().__init__(node_x, node_y, edge_x=edge_x, edge_y=edge_y) + + # optional coordinates + self.face_x = face_x + self.face_y = face_y + + @property + def _face_shape(self): + return self._shape(location="face") + + @property + def all_members(self): + return Mesh2DCoords(**self._members) + + @property + def face_coords(self): + return MeshFaceCoords(face_x=self.face_x, face_y=self.face_y) + + @property + def face_x(self): + return self._members["face_x"] + + @face_x.setter + def face_x(self, coord): + self._setter( + location="face", axis="x", coord=coord, shape=self._face_shape + ) + + @property + def face_y(self): + return self._members["face_y"] + + @face_y.setter + def face_y(self, coord): + self._setter( + location="face", axis="y", coord=coord, shape=self._face_shape + ) + + def add( + self, + node_x=None, + node_y=None, + edge_x=None, + edge_y=None, + face_x=None, + face_y=None, + ): + super().add(node_x=node_x, node_y=node_y, edge_x=edge_x, edge_y=edge_y) + self._add(MeshFaceCoords(face_x, face_y)) + + def remove( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + axis=None, + include_nodes=None, + include_edges=None, + include_faces=None, + ): 
+ return self._remove( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + axis=axis, + include_nodes=include_nodes, + include_edges=include_edges, + include_faces=include_faces, + ) + + +class _MeshConnectivityManagerBase(ABC): + # Override these in subclasses. + REQUIRED: tuple = NotImplemented + OPTIONAL: tuple = NotImplemented + + def __init__(self, *connectivities): + cf_roles = [c.cf_role for c in connectivities] + for requisite in self.REQUIRED: + if requisite not in cf_roles: + message = f"{type(self).__name__} requires a {requisite} Connectivity." + raise ValueError(message) + + self.ALL = self.REQUIRED + self.OPTIONAL + self._members = {member: None for member in self.ALL} + self.add(*connectivities) + + def __eq__(self, other): + # TBD: this is a minimalist implementation and requires to be revisited + return id(self) == id(other) + + def __getstate__(self): + return self._members + + def __iter__(self): + for item in self._members.items(): + yield item + + def __ne__(self, other): + result = self.__eq__(other) + if result is not NotImplemented: + result = not result + return result + + def __repr__(self): + args = [ + f"{member}={connectivity!r}" + for member, connectivity in self + if connectivity is not None + ] + return f"{self.__class__.__name__}({', '.join(args)})" + + def __setstate__(self, state): + self._members = state + + def __str__(self): + args = [ + f"{member}" + for member, connectivity in self + if connectivity is not None + ] + return f"{self.__class__.__name__}({', '.join(args)})" + + @property + @abstractmethod + def all_members(self): + return NotImplemented + + def add(self, *connectivities): + # Since Connectivity classes include their cf_role, no setters will be + # provided, just a means to add one or more connectivities to the + # manager. + # No warning is raised for duplicate cf_roles - user is trusted to + # validate their outputs. 
+ add_dict = {} + for connectivity in connectivities: + if not isinstance(connectivity, Connectivity): + message = f"Expected Connectivity, got: {type(connectivity)} ." + raise TypeError(message) + cf_role = connectivity.cf_role + if cf_role not in self.ALL: + message = ( + f"Not adding connectivity ({cf_role}: " + f"{connectivity!r}) - cf_role must be one of: {self.ALL} ." + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + else: + add_dict[cf_role] = connectivity + + # Validate shapes. + proposed_members = {**self._members, **add_dict} + locations = set( + [ + c.src_location + for c in proposed_members.values() + if c is not None + ] + ) + for location in locations: + counts = [ + len(c.indices_by_src(c.lazy_indices())) + for c in proposed_members.values() + if c is not None and c.src_location == location + ] + # Check is list values are identical. + if not counts.count(counts[0]) == len(counts): + message = ( + f"Invalid Connectivities provided - inconsistent " + f"{location} counts." + ) + raise ValueError(message) + + self._members = proposed_members + + def filter(self, **kwargs): + # TODO: rationalise commonality with MeshCoordManager.filter and Cube.coord. + result = self.filters(**kwargs) + if len(result) > 1: + names = ", ".join( + f"{member}={connectivity!r}" + for member, connectivity in result.items() + ) + message = ( + f"Expected to find exactly 1 connectivity, but found " + f"{len(result)}. They were: {names}." + ) + raise ConnectivityNotFoundError(message) + elif len(result) == 0: + item = kwargs["item"] + _name = item + if item is not None: + if not isinstance(item, str): + _name = item.name() + bad_name = ( + _name or kwargs["standard_name"] or kwargs["long_name"] or "" + ) + message = ( + f"Expected to find exactly 1 {bad_name} connectivity, " + f"but found none." 
+ ) + raise ConnectivityNotFoundError(message) + + return result + + def filters( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + contains_node=None, + contains_edge=None, + contains_face=None, + ): + members = [c for c in self._members.values() if c is not None] + + if cf_role is not None: + members = [ + instance for instance in members if instance.cf_role == cf_role + ] + + def location_filter(instances, loc_arg, loc_name): + if loc_arg is False: + filtered = [ + instance + for instance in instances + if loc_name + not in (instance.src_location, instance.tgt_location) + ] + elif loc_arg is None: + filtered = instances + else: + # Interpret any other value as =True. + filtered = [ + instance + for instance in instances + if loc_name + in (instance.src_location, instance.tgt_location) + ] + + return filtered + + for arg, loc in ( + (contains_node, "node"), + (contains_edge, "edge"), + (contains_face, "face"), + ): + members = location_filter(members, arg, loc) + + # No need to actually modify filtering behaviour - already won't return + # any face cf-roles if none are present. + supports_faces = any(["face" in role for role in self.ALL]) + if contains_face and not supports_faces: + message = ( + "Ignoring request to filter for non-existent 'face' cf-roles." + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + + result = metadata_filter( + members, + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + ) + + # Use the results to filter the _members dict for returning. 
+ result_ids = [id(r) for r in result] + result_dict = { + k: v for k, v in self._members.items() if id(v) in result_ids + } + return result_dict + + def remove( + self, + item=None, + standard_name=None, + long_name=None, + var_name=None, + attributes=None, + cf_role=None, + contains_node=None, + contains_edge=None, + contains_face=None, + ): + removal_dict = self.filters( + item=item, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + attributes=attributes, + cf_role=cf_role, + contains_node=contains_node, + contains_edge=contains_edge, + contains_face=contains_face, + ) + for cf_role in self.REQUIRED: + excluded = removal_dict.pop(cf_role, None) + if excluded: + message = ( + f"Ignoring request to remove required connectivity " + f"({cf_role}: {excluded!r})" + ) + logger.debug(message, extra=dict(cls=self.__class__.__name__)) + + for cf_role in removal_dict.keys(): + self._members[cf_role] = None + + return removal_dict + + +class _Mesh1DConnectivityManager(_MeshConnectivityManagerBase): + REQUIRED = ("edge_node_connectivity",) + OPTIONAL = () + + @property + def all_members(self): + return Mesh1DConnectivities(edge_node=self.edge_node) + + @property + def edge_node(self): + return self._members["edge_node_connectivity"] + + +class _Mesh2DConnectivityManager(_MeshConnectivityManagerBase): + REQUIRED = ("face_node_connectivity",) + OPTIONAL = ( + "edge_node_connectivity", + "face_edge_connectivity", + "face_face_connectivity", + "edge_face_connectivity", + "boundary_node_connectivity", + ) + + @property + def all_members(self): + return Mesh2DConnectivities( + face_node=self.face_node, + edge_node=self.edge_node, + face_edge=self.face_edge, + face_face=self.face_face, + edge_face=self.edge_face, + boundary_node=self.boundary_node, + ) + + @property + def boundary_node(self): + return self._members["boundary_node_connectivity"] + + @property + def edge_face(self): + return self._members["edge_face_connectivity"] + + @property + def 
edge_node(self): + return self._members["edge_node_connectivity"] + + @property + def face_edge(self): + return self._members["face_edge_connectivity"] + + @property + def face_face(self): + return self._members["face_face_connectivity"] + + @property + def face_node(self): + return self._members["face_node_connectivity"] + + +class MeshCoord(AuxCoord): + """ + Geographic coordinate values of data on an unstructured mesh. + + A MeshCoord references a `~iris.experimental.ugrid.Mesh`. + When contained in a `~iris.cube.Cube` it connects the cube to the Mesh. + It records (a) which 1-D cube dimension represents the unstructured mesh, + and (b) which mesh 'location' the cube data is mapped to -- i.e. is it + data on 'face's, 'edge's or 'node's. + + A MeshCoord also specifies its 'axis' : 'x' or 'y'. Its values are then, + accordingly, longitudes or latitudes. The values are taken from the + appropriate coordinates and connectivities in the Mesh, determined by its + 'location' and 'axis'. + + Any cube with data on a mesh will have a MeshCoord for each axis, + i.e. an 'X' and a 'Y'. + + The points and bounds contain coordinate values for the mesh elements, + which depends on location. + For 'node', the ``.points`` contains node locations. + For 'edge', the ``.bounds`` contains edge endpoints, and the ``.points`` contain + edge locations (typically centres), if the Mesh contains them (optional). + For 'face', the ``.bounds`` contain the face corners, and the ``.points`` contain the + face locations (typically centres), if the Mesh contains them (optional). + + .. note:: + As described above, it is possible for a MeshCoord to have bounds but + no points. This is not possible for a regular + :class:`~iris.coords.AuxCoord` or :class:`~iris.coords.DimCoord`. + + .. note:: + A MeshCoord can not yet actually be created with bounds but no points. + This is intended in future, but for now it raises an error. 
+ + """ + + def __init__( + self, + mesh, + location, + axis, + ): + # Setup the metadata. + self._metadata_manager = metadata_manager_factory(MeshCoordMetadata) + + # Validate and record the class-specific constructor args. + if not isinstance(mesh, Mesh): + msg = ( + "'mesh' must be an " + f"{Mesh.__module__}.{Mesh.__name__}, " + f"got {mesh}." + ) + raise TypeError(msg) + # Handled as a readonly ".mesh" property. + # NOTE: currently *not* included in metadata. In future it might be. + self._mesh = mesh + + if location not in Mesh.LOCATIONS: + msg = ( + f"'location' of {location} is not a valid Mesh location', " + f"must be one of {Mesh.LOCATIONS}." + ) + raise ValueError(msg) + # Held in metadata, readable as self.location, but cannot set it. + self._metadata_manager.location = location + + if axis not in Mesh.AXES: + # The valid axes are defined by the Mesh class. + msg = ( + f"'axis' of {axis} is not a valid Mesh axis', " + f"must be one of {Mesh.AXES}." + ) + raise ValueError(msg) + # Held in metadata, readable as self.axis, but cannot set it. + self._metadata_manager.axis = axis + + points, bounds = self._construct_access_arrays() + if points is None: + # TODO: we intend to support this in future, but it will require + # extra work to refactor the parent classes. + msg = "Cannot yet create a MeshCoord without points." + raise ValueError(msg) + + # Get the 'coord identity' metadata from the relevant node-coordinate. + node_coord = self.mesh.coord(include_nodes=True, axis=self.axis) + # Call parent constructor to handle the common constructor args. + super().__init__( + points, + bounds=bounds, + standard_name=node_coord.standard_name, + long_name=node_coord.long_name, + var_name=None, # We *don't* "represent" the underlying node var + units=node_coord.units, + attributes=node_coord.attributes, + ) + + # Define accessors for MeshCoord-specific properties mesh/location/axis. + # These are all read-only. 
+ + @property + def mesh(self): + return self._mesh + + @property + def location(self): + return self._metadata_manager.location + + @property + def axis(self): + return self._metadata_manager.axis + + # Provide overrides to mimic the Coord-specific properties that are not + # supported by MeshCoord, i.e. "coord_system" and "climatological". + # These mimic the Coord properties, but always return fixed 'null' values. + # They can be set, to the 'null' value only, for the inherited init code. + + @property + def coord_system(self): + """The coordinate-system of a MeshCoord is always 'None'.""" + return None + + @coord_system.setter + def coord_system(self, value): + if value is not None: + msg = "Cannot set the coordinate-system of a MeshCoord." + raise ValueError(msg) + + @property + def climatological(self): + """The 'climatological' of a MeshCoord is always 'False'.""" + return False + + @climatological.setter + def climatological(self, value): + if value: + msg = "Cannot set 'climatological' on a MeshCoord." + raise ValueError(msg) + + def __getitem__(self, keys): + # Disallow any sub-indexing, permitting *only* "self[:,]". + # We *don't* intend here to support indexing as such : the exception is + # just sufficient to enable cube slicing, when it does not affect the + # mesh dimension. This works because Cube.__getitem__ passes us keys + # "normalised" with iris.util._build_full_slice_given_keys. + if keys != (slice(None),): + msg = "Cannot index a MeshCoord." + raise ValueError(msg) + + # Translate "self[:,]" as "self.copy()". + return self.copy() + + def copy(self, points=None, bounds=None): + """ + Make a copy of the MeshCoord. + + Kwargs: + + * points, bounds (array): + Provided solely for signature compatibility with other types of + :class:`~iris.coords.Coord`. + In this case, if either is not 'None', an error is raised. + + """ + # Override Coord.copy, so that we can ensure it does not duplicate the + # Mesh object (via deepcopy). 
+ # This avoids copying Meshes. It is also required to allow a copied + # MeshCoord to be == the original, since for now Mesh == is only true + # for the same identical object. + + # FOR NOW: also disallow changing points/bounds at all. + if points is not None or bounds is not None: + msg = "Cannot change the content of a MeshCoord." + raise ValueError(msg) + + # Make a new MeshCoord with the same args : The Mesh is the *same* + # as the original (not a copy). + new_coord = MeshCoord( + mesh=self.mesh, location=self.location, axis=self.axis + ) + return new_coord + + def __deepcopy__(self, memo): + """ + Make this equivalent to "shallow" copy, returning a new MeshCoord based + on the same Mesh. + + Required to prevent cube copying from copying the Mesh, which would + prevent "cube.copy() == cube" : see notes for :meth:`copy`. + + """ + return self.copy() + + # Override _DimensionalMetadata.__eq__, to add 'mesh' comparison into the + # default implementation (which compares metadata, points and bounds). + # This is needed because 'mesh' is not included in our metadata. + def __eq__(self, other): + eq = NotImplemented + if isinstance(other, MeshCoord): + # *Don't* use the parent (_DimensionalMetadata) __eq__, as that + # will try to compare points and bounds arrays. + # Just compare the mesh, and the (other) metadata. + eq = self.mesh == other.mesh # N.B. 'mesh' not in metadata. + if eq is not NotImplemented and eq: + # Compare rest of metadata, but not points/bounds. + eq = self.metadata == other.metadata + + return eq + + # Exactly as for Coord.__hash__ : See there for why. + def __hash__(self): + return hash(id(self)) + + def _string_summary(self, repr_style): + # Note: bypass the immediate parent here, which is Coord, because we + # have no interest in reporting coord_system or climatological, or in + # printing out our points/bounds. + # We also want to list our defining properties, i.e. 
mesh/location/axis + # *first*, before names/units etc, so different from other Coord types. + + # First construct a shortform text summary to identify the Mesh. + # IN 'str-mode', this attempts to use Mesh.name() if it is set, + # otherwise uses an object-id style (as also for 'repr-mode'). + # TODO: use a suitable method provided by Mesh, e.g. something like + # "Mesh.summary(shorten=True)", when it is available. + mesh_name = None + if not repr_style: + mesh_name = self.mesh.name() + if mesh_name in (None, "", "unknown"): + mesh_name = None + if mesh_name: + # Use a more human-readable form + mesh_string = f"Mesh({mesh_name!r})" + else: + # Mimic the generic object.__str__ style. + mesh_id = id(self.mesh) + mesh_string = f"<Mesh object at {hex(mesh_id)}>" + result = ( + f"mesh={mesh_string}" + f", location={self.location!r}" + f", axis={self.axis!r}" + ) + # Add 'other' metadata that is drawn from the underlying node-coord. + # But put these *afterward*, unlike other similar classes. + for item in ( + "shape", + "standard_name", + "units", + "long_name", + "attributes", + ): + # NOTE: order of these matches Coord.summary, but omit var_name. + val = getattr(self, item, None) + if item == "attributes": + is_blank = len(val) == 0 # an empty dict is as good as none + else: + is_blank = val is None + if not is_blank: + result += f", {item}={val!r}" + + result = f"MeshCoord({result})" + return result + + def __str__(self): + return self._string_summary(repr_style=False) + + def __repr__(self): + return self._string_summary(repr_style=True) + + def _construct_access_arrays(self): + """ + Build lazy points and bounds arrays, providing dynamic access via the + Mesh, according to the location and axis. + + Returns: + * points, bounds (array or None): + lazy arrays which calculate the correct points and bounds from the + Mesh data, based on the location and axis.
+ The Mesh coordinates accessed are not identified on construction, + but discovered from the Mesh at the time of calculation, so that + the result is always based on current content in the Mesh. + + """ + mesh, location, axis = self.mesh, self.location, self.axis + node_coord = self.mesh.coord(include_nodes=True, axis=axis) + + if location == "node": + points_coord = node_coord + bounds_connectivity = None + elif location == "edge": + points_coord = self.mesh.coord(include_edges=True, axis=axis) + bounds_connectivity = mesh.edge_node_connectivity + elif location == "face": + points_coord = self.mesh.coord(include_faces=True, axis=axis) + bounds_connectivity = mesh.face_node_connectivity + + # The points output is the points of the relevant element-type coord. + points = points_coord.core_points() + if bounds_connectivity is None: + bounds = None + else: + # Bounds are calculated from a connectivity and the node points. + # Data can be real or lazy, so operations must work in Dask, too. + indices = bounds_connectivity.core_indices() + # Normalise indices dimension order to [faces/edges, bounds] + indices = bounds_connectivity.indices_by_src(indices) + # Normalise the start index + indices = indices - bounds_connectivity.start_index + + node_points = node_coord.core_points() + n_nodes = node_points.shape[0] + # Choose real/lazy array library, to suit array types. + lazy = _lazy.is_lazy_data(indices) or _lazy.is_lazy_data( + node_points + ) + al = da if lazy else np + # NOTE: Dask cannot index with a multidimensional array, so we + # must flatten it and restore the shape later. + flat_inds = indices.flatten() + # NOTE: the connectivity array can have masked points, but we can't + # effectively index with those. So use a non-masked index array + # with "safe" index values, and post-mask the results. + flat_inds_nomask = al.ma.filled(flat_inds, -1) + # Note: *also* mask any places where the index is out of range. 
+ missing_inds = (flat_inds_nomask < 0) | ( + flat_inds_nomask >= n_nodes + ) + flat_inds_safe = al.where(missing_inds, 0, flat_inds_nomask) + # Here's the core indexing operation. + # The comma applies all inds-array values to the *first* dimension. + bounds = node_points[ + flat_inds_safe, + ] + # Fix 'missing' locations, and restore the proper shape. + bounds = al.ma.masked_array(bounds, missing_inds) + bounds = bounds.reshape(indices.shape) + + return points, bounds + + +class MeshCoordMetadata(BaseMetadata): + """ + Metadata container for a :class:`~iris.coords.MeshCoord`. + """ + + _members = ("location", "axis") + # NOTE: in future, we may add 'mesh' as part of this metadata, + # as the Mesh seems part of the 'identity' of a MeshCoord. + # For now we omit it, particularly as we don't yet implement Mesh.__eq__. + # + # Thus, for now, the MeshCoord class will need to handle 'mesh' explicitly + # in identity / comparison, but in future that may be simplified. + + __slots__ = () + + @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=()) + @lenient_service + def __eq__(self, other): + return super().__eq__(other) + + def _combine_lenient(self, other): + """ + Perform lenient combination of metadata members for MeshCoord. + + Args: + + * other (MeshCoordMetadata): + The other metadata participating in the lenient combination. + + Returns: + A list of combined metadata member values. + + """ + # It is actually "strict" : return None except where members are equal. + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return left if left == right else None + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in self._members] + # Perform lenient combination of the other parent members. + result = super()._combine_lenient(other) + result.extend(values) + + return result + + def _compare_lenient(self, other): + """ + Perform lenient equality of metadata members for MeshCoord. 
+ + Args: + + * other (MeshCoordMetadata): + The other metadata participating in the lenient comparison. + + Returns: + Boolean. + + """ + # Perform "strict" comparison for the MeshCoord specific members + # 'location', 'axis' : for equality, they must all match. + result = all( + [ + getattr(self, field) == getattr(other, field) + for field in self._members + ] + ) + if result: + # Perform lenient comparison of the other parent members. + result = super()._compare_lenient(other) + + return result + + def _difference_lenient(self, other): + """ + Perform lenient difference of metadata members for MeshCoord. + + Args: + + * other (MeshCoordMetadata): + The other MeshCoord metadata participating in the lenient + difference. + + Returns: + A list of different metadata member values. + + """ + # Perform "strict" difference for location / axis. + def func(field): + left = getattr(self, field) + right = getattr(other, field) + return None if left == right else (left, right) + + # Note that, we use "_members" not "_fields". + values = [func(field) for field in self._members] + # Perform lenient difference of the other parent members. + result = super()._difference_lenient(other) + result.extend(values) + + return result + + @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=()) + @lenient_service + def combine(self, other, lenient=None): + return super().combine(other, lenient=lenient) + + @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=()) + @lenient_service + def difference(self, other, lenient=None): + return super().difference(other, lenient=lenient) + + @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=()) + @lenient_service + def equal(self, other, lenient=None): + return super().equal(other, lenient=lenient) + + +# Add our new optional metadata operations into the 'convenience collections' +# of lenient metadata services. +# TODO: when included in 'iris.common.metadata', install each one directly ? 
+_op_names_and_service_collections = [ + ("combine", SERVICES_COMBINE), + ("difference", SERVICES_DIFFERENCE), + ("__eq__", SERVICES_EQUAL), + ("equal", SERVICES_EQUAL), +] +_metadata_classes = [ConnectivityMetadata, MeshMetadata, MeshCoordMetadata] +for _cls in _metadata_classes: + for _name, _service_collection in _op_names_and_service_collections: + _method = getattr(_cls, _name) + _service_collection.append(_method) + SERVICES.append(_method) + +del ( + _op_names_and_service_collections, + _metadata_classes, + _cls, + _name, + _service_collection, + _method, +) + + +############################################################################### +# LOADING + + +class ParseUGridOnLoad(threading.local): + def __init__(self): + """ + A flag for dictating whether to use the experimental UGRID-aware + version of Iris NetCDF loading. Object is thread-safe. + + Use via the run-time switch :const:`PARSE_UGRID_ON_LOAD`. + Use :meth:`context` to temporarily activate. + + .. seealso:: + + The UGRID Conventions, + https://ugrid-conventions.github.io/ugrid-conventions/ + + """ + self._state = False + + def __bool__(self): + return self._state + + @contextmanager + def context(self): + """ + Temporarily activate experimental UGRID-aware NetCDF loading. + + Use the standard Iris loading API while within the context manager. If + the loaded file(s) include any UGRID content, this will be parsed and + attached to the resultant cube(s) accordingly. + + Use via the run-time switch :const:`PARSE_UGRID_ON_LOAD`. + + For example:: + + with PARSE_UGRID_ON_LOAD.context(): + my_cube_list = iris.load([my_file_path, my_file_path2], + constraint=my_constraint, + callback=my_callback) + + """ + try: + self._state = True + yield + finally: + self._state = False + + +#: Run-time switch for experimental UGRID-aware NetCDF loading. See :class:`ParseUGridOnLoad`. +PARSE_UGRID_ON_LOAD = ParseUGridOnLoad() + + +############ +# CF Overrides. 
+# These are not included in __all__ since they are not [currently] needed +# outside this module. + + +class CFUGridConnectivityVariable(cf.CFVariable): + """ + A CF_UGRID connectivity variable points to an index variable identifying + for every element (edge/face/volume) the indices of its corner nodes. The + connectivity array will thus be a matrix of size n-elements x n-corners. + For the indexing one may use either 0- or 1-based indexing; the convention + used should be specified using a ``start_index`` attribute to the index + variable. + + For face elements: the corner nodes should be specified in anticlockwise + direction as viewed from above. For volume elements: use the + additional attribute ``volume_shape_type`` which points to a flag variable + that specifies for every volume its shape. + + Identified by a CF-netCDF variable attribute equal to any one of the values + in :attr:`~iris.experimental.ugrid.Connectivity.UGRID_CF_ROLES`. + + .. seealso:: + + The UGRID Conventions, https://ugrid-conventions.github.io/ugrid-conventions/ + + """ + + cf_identity = NotImplemented + cf_identities = Connectivity.UGRID_CF_ROLES + + @classmethod + def identify(cls, variables, ignore=None, target=None, warn=True): + result = {} + ignore, target = cls._identify_common(variables, ignore, target) + # TODO: reconsider logging level when we have consistent practice. + log_level = logging.WARNING if warn else logging.DEBUG + + # Identify all CF-UGRID connectivity variables. + for nc_var_name, nc_var in target.items(): + # Check for connectivity variable references, iterating through + # the valid cf roles. + for identity in cls.cf_identities: + nc_var_att = getattr(nc_var, identity, None) + + if nc_var_att is not None: + # UGRID only allows for one of each connectivity cf role. 
+ name = nc_var_att.strip() + if name not in ignore: + if name not in variables: + message = ( + f"Missing CF-UGRID connectivity variable " + f"{name}, referenced by netCDF variable " + f"{nc_var_name}" + ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) + else: + # Restrict to non-string type i.e. not a + # CFLabelVariable. + if not cf._is_str_dtype(variables[name]): + result[name] = CFUGridConnectivityVariable( + name, variables[name] + ) + else: + message = ( + f"Ignoring variable {name}, identified " + f"as a CF-UGRID connectivity - is a " + f"CF-netCDF label variable." + ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) + + return result + + +class CFUGridAuxiliaryCoordinateVariable(cf.CFVariable): + """ + A CF-UGRID auxiliary coordinate variable is a CF-netCDF auxiliary + coordinate variable representing the element (node/edge/face/volume) + locations (latitude, longitude or other spatial coordinates, and optional + elevation or other coordinates). These auxiliary coordinate variables will + have length n-elements. + + For elements other than nodes, these auxiliary coordinate variables may + have in turn a ``bounds`` attribute that specifies the bounding coordinates + of the element (thereby duplicating the data in the ``node_coordinates`` + variables). + + Identified by the CF-netCDF variable attribute + 'node_'/'edge_'/'face_'/'volume_coordinates'. + + .. seealso:: + + The UGRID Conventions, https://ugrid-conventions.github.io/ugrid-conventions/ + + """ + + cf_identity = NotImplemented + cf_identities = [ + "node_coordinates", + "edge_coordinates", + "face_coordinates", + "volume_coordinates", + ] + + @classmethod + def identify(cls, variables, ignore=None, target=None, warn=True): + result = {} + ignore, target = cls._identify_common(variables, ignore, target) + # TODO: reconsider logging level when we have consistent practice. 
+ log_level = logging.WARNING if warn else logging.DEBUG + + # Identify any CF-UGRID-relevant auxiliary coordinate variables. + for nc_var_name, nc_var in target.items(): + # Check for UGRID auxiliary coordinate variable references. + for identity in cls.cf_identities: + nc_var_att = getattr(nc_var, identity, None) + + if nc_var_att is not None: + for name in nc_var_att.split(): + if name not in ignore: + if name not in variables: + message = ( + f"Missing CF-netCDF auxiliary coordinate " + f"variable {name}, referenced by netCDF " + f"variable {nc_var_name}" + ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) + else: + # Restrict to non-string type i.e. not a + # CFLabelVariable. + if not cf._is_str_dtype(variables[name]): + result[ + name + ] = CFUGridAuxiliaryCoordinateVariable( + name, variables[name] + ) + else: + message = ( + f"Ignoring variable {name}, " + f"identified as a CF-netCDF " + f"auxiliary coordinate - is a " + f"CF-netCDF label variable." + ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) + + return result + + +class CFUGridMeshVariable(cf.CFVariable): + """ + A CF-UGRID mesh variable is a dummy variable for storing topology + information as attributes. The mesh variable has the ``cf_role`` + 'mesh_topology'. + + The UGRID conventions describe define the mesh topology as the + interconnection of various geometrical elements of the mesh. The pure + interconnectivity is independent of georeferencing the individual + geometrical elements, but for the practical applications for which the + UGRID CF extension is defined, coordinate data will always be added. + + Identified by the CF-netCDF variable attribute 'mesh'. + + .. 
seealso:: + + The UGRID Conventions, https://ugrid-conventions.github.io/ugrid-conventions/ + + """ + + cf_identity = "mesh" + + @classmethod + def identify(cls, variables, ignore=None, target=None, warn=True): + result = {} + ignore, target = cls._identify_common(variables, ignore, target) + # TODO: reconsider logging level when we have consistent practice. + log_level = logging.WARNING if warn else logging.DEBUG + + # Identify all CF-UGRID mesh variables. + for nc_var_name, nc_var in target.items(): + # Check for mesh variable references. + nc_var_att = getattr(nc_var, cls.cf_identity, None) + + if nc_var_att is not None: + # UGRID only allows for 1 mesh per variable. + name = nc_var_att.strip() + if name not in ignore: + if name not in variables: + message = ( + f"Missing CF-UGRID mesh variable {name}, " + f"referenced by netCDF variable {nc_var_name}" + ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) + else: + # Restrict to non-string type i.e. not a + # CFLabelVariable. + if not cf._is_str_dtype(variables[name]): + result[name] = CFUGridMeshVariable( + name, variables[name] + ) + else: + message = ( + f"Ignoring variable {name}, identified as a " + f"CF-UGRID mesh - is a CF-netCDF label " + f"variable." + ) + logger.log( + level=log_level, + msg=message, + extra=dict(cls=cls.__name__), + ) + + return result + + +class CFUGridGroup(cf.CFGroup): + """ + Represents a collection of 'NetCDF Climate and Forecast (CF) Metadata + Conventions' variables and netCDF global attributes. + + Specialisation of :class:`~iris.fileformats.cf.CFGroup` that includes extra + collections for CF-UGRID-specific variable types. 
+ + """ + + @property + def connectivities(self): + """Collection of CF-UGRID connectivity variables.""" + return self._cf_getter(CFUGridConnectivityVariable) + + @property + def ugrid_coords(self): + """Collection of CF-UGRID-relevant auxiliary coordinate variables.""" + return self._cf_getter(CFUGridAuxiliaryCoordinateVariable) + + @property + def meshes(self): + """Collection of CF-UGRID mesh variables.""" + return self._cf_getter(CFUGridMeshVariable) + + @property + def non_data_variable_names(self): + """ + :class:`set` of the names of the CF-netCDF/CF-UGRID variables that are + not the data pay-load. + + """ + extra_variables = (self.connectivities, self.ugrid_coords, self.meshes) + extra_result = set() + for variable in extra_variables: + extra_result |= set(variable) + return super().non_data_variable_names | extra_result + + +class CFUGridReader(cf.CFReader): + """ + This class allows the contents of a netCDF file to be interpreted according + to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. + + Specialisation of :class:`~iris.fileformats.cf.CFReader` that can also + handle CF-UGRID-specific variable types. + + """ + + _variable_types = cf.CFReader._variable_types + ( + CFUGridConnectivityVariable, + CFUGridAuxiliaryCoordinateVariable, + CFUGridMeshVariable, + ) + + CFGroup = CFUGridGroup + + +############ +# Object construction. +# Helper functions, supporting netcdf.load_cubes ONLY, expected to +# altered/moved when pyke is removed. + + +def _build_aux_coord(coord_var, file_path): + """ + Construct a :class:`~iris.coords.AuxCoord` from a given + :class:`CFUGridAuxiliaryCoordinateVariable`, and guess its mesh axis. + + todo: integrate with standard loading API post-pyke. 
+ + """ + assert isinstance(coord_var, CFUGridAuxiliaryCoordinateVariable) + attributes = {} + attr_units = get_attr_units(coord_var, attributes) + points_data = netcdf._get_cf_var_data(coord_var, file_path) + + # Bounds will not be loaded: + # Bounds may be present, but the UGRID conventions state this would + # always be duplication of the same info provided by the mandatory + # connectivities. + + # Fetch climatological - not allowed for a Mesh, but loading it will + # mean an informative error gets raised. + climatological = False + # TODO: use CF_ATTR_CLIMATOLOGY once re-integrated post-pyke. + attr_climatology = getattr(coord_var, "climatology", None) + if attr_climatology is not None: + climatology_vars = coord_var.cf_group.climatology + climatological = attr_climatology in climatology_vars + + standard_name, long_name, var_name = get_names(coord_var, None, attributes) + coord = AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + # TODO: coord_system + climatological=climatological, + ) + + axis = guess_coord_axis(coord) + if axis is None: + if var_name[-2] == "_": + # Fall back on UGRID var_name convention. + axis = var_name[-1] + else: + message = f"Cannot guess axis for UGRID coord: {var_name} ." + raise ValueError(message) + + return coord, axis + + +def _build_connectivity(connectivity_var, file_path, location_dims): + """ + Construct a :class:`Connectivity` from a given + :class:`CFUGridConnectivityVariable`, and identify the name of its first + dimension. + + todo: integrate with standard loading API post-pyke. 
+ + """ + assert isinstance(connectivity_var, CFUGridConnectivityVariable) + attributes = {} + attr_units = get_attr_units(connectivity_var, attributes) + indices_data = netcdf._get_cf_var_data(connectivity_var, file_path) + + cf_role = connectivity_var.cf_role + start_index = connectivity_var.start_index + + dim_names = connectivity_var.dimensions + # Connectivity arrays must have two dimensions. + assert len(dim_names) == 2 + if dim_names[1] in location_dims: + src_dim = 1 + else: + src_dim = 0 + + standard_name, long_name, var_name = get_names( + connectivity_var, None, attributes + ) + + connectivity = Connectivity( + indices=indices_data, + cf_role=cf_role, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + start_index=start_index, + src_dim=src_dim, + ) + + return connectivity, dim_names[0] + + +def _build_mesh(cf, mesh_var, file_path): + """ + Construct a :class:`Mesh` from a given :class:`CFUGridMeshVariable`. + + todo: integrate with standard loading API post-pyke. + + """ + assert isinstance(mesh_var, CFUGridMeshVariable) + attributes = {} + attr_units = get_attr_units(mesh_var, attributes) + + if hasattr(mesh_var, "volume_node_connectivity"): + topology_dimension = 3 + elif hasattr(mesh_var, "face_node_connectivity"): + topology_dimension = 2 + elif hasattr(mesh_var, "edge_node_connectivity"): + topology_dimension = 1 + else: + # Nodes only. We aren't sure yet whether this is a valid option. + topology_dimension = 0 + + if not hasattr(mesh_var, "topology_dimension"): + msg = ( + f"Mesh variable {mesh_var.cf_name} has no 'topology_dimension'" + f" : *Assuming* topology_dimension={topology_dimension}" + ", consistent with the attached connectivities." + ) + # TODO: reconsider logging level when we have consistent practice. 
+                logger.warning(msg, extra=dict(cls=None))
+    else:
+        quoted_topology_dimension = mesh_var.topology_dimension
+        if quoted_topology_dimension != topology_dimension:
+            msg = (
+                f"*Assuming* 'topology_dimension'={topology_dimension}"
+                f", from the attached connectivities of the mesh variable "
+                f"{mesh_var.cf_name}. However, "
+                f"{mesh_var.cf_name}:topology_dimension = "
+                f"{quoted_topology_dimension}"
+                " -- ignoring this as it is inconsistent."
+            )
+            # TODO: reconsider logging level when we have consistent practice.
+            logger.warning(msg=msg, extra=dict(cls=None))
+
+    node_dimension = None
+    edge_dimension = getattr(mesh_var, "edge_dimension", None)
+    face_dimension = getattr(mesh_var, "face_dimension", None)
+
+    node_coord_args = []
+    edge_coord_args = []
+    face_coord_args = []
+    for coord_var in mesh_var.cf_group.ugrid_coords.values():
+        coord_and_axis = _build_aux_coord(coord_var, file_path)
+        coord = coord_and_axis[0]
+
+        if coord.var_name in mesh_var.node_coordinates.split():
+            node_coord_args.append(coord_and_axis)
+            node_dimension = coord_var.dimensions[0]
+        elif (
+            coord.var_name in getattr(mesh_var, "edge_coordinates", "").split()
+        ):
+            edge_coord_args.append(coord_and_axis)
+        elif (
+            coord.var_name in getattr(mesh_var, "face_coordinates", "").split()
+        ):
+            face_coord_args.append(coord_and_axis)
+        # TODO: support volume_coordinates.
+        else:
+            message = (
+                f"Invalid UGRID coord: {coord.var_name} . Must be either a "
+                f"node_, edge_ or face_coordinate."
+            )
+            raise ValueError(message)
+
+    if node_dimension is None:
+        message = (
+            "'node_dimension' could not be identified from mesh node "
+            "coordinates."
+        )
+        raise ValueError(message)
+
+    # Used for detecting transposed connectivities.
+ location_dims = (edge_dimension, face_dimension) + connectivity_args = [] + for connectivity_var in mesh_var.cf_group.connectivities.values(): + connectivity, first_dim_name = _build_connectivity( + connectivity_var, file_path, location_dims + ) + assert connectivity.var_name == getattr(mesh_var, connectivity.cf_role) + connectivity_args.append(connectivity) + + # If the mesh_var has not supplied the dimension name, it is safe to + # fall back on the connectivity's first dimension's name. + if edge_dimension is None and connectivity.src_location == "edge": + edge_dimension = first_dim_name + if face_dimension is None and connectivity.src_location == "face": + face_dimension = first_dim_name + + standard_name, long_name, var_name = get_names(mesh_var, None, attributes) + + mesh = Mesh( + topology_dimension=topology_dimension, + node_coords_and_axes=node_coord_args, + connectivities=connectivity_args, + edge_coords_and_axes=edge_coord_args, + face_coords_and_axes=face_coord_args, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + node_dimension=node_dimension, + edge_dimension=edge_dimension, + face_dimension=face_dimension, + ) + assert mesh.cf_role == mesh_var.cf_role + + mesh_elements = ( + list(mesh.all_coords) + list(mesh.all_connectivities) + [mesh] + ) + mesh_elements = filter(None, mesh_elements) + for iris_object in mesh_elements: + netcdf._add_unused_attributes( + iris_object, cf.cf_group[iris_object.var_name] + ) + + return mesh + + +def _build_mesh_coords(mesh, cf_var): + """ + Construct a tuple of :class:`MeshCoord` using from a given :class:`Mesh` + and :class:`~iris.fileformats.cf.CFVariable`. + + todo: integrate with standard loading API post-pyke. + + """ + # Identify the cube's mesh dimension, for attaching MeshCoords. 
+ locations_dimensions = { + "node": mesh.node_dimension, + "edge": mesh.edge_dimension, + "face": mesh.face_dimension, + } + mesh_dim_name = locations_dimensions[cf_var.location] + # (Only expecting 1 mesh dimension per cf_var). + mesh_dim = cf_var.dimensions.index(mesh_dim_name) + + mesh_coords = mesh.to_MeshCoords(location=cf_var.location) + return mesh_coords, mesh_dim + + +# END of loading section. +############################################################################### diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index edf952765e..1429e4f65e 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -947,6 +947,28 @@ def cell_measures(self): """Collection of CF-netCDF measure variables.""" return self._cf_getter(CFMeasureVariable) + @property + def non_data_variable_names(self): + """ + :class:`set` of the names of the CF-netCDF variables that are not + the data pay-load. + + """ + non_data_variables = ( + self.ancillary_variables, + self.auxiliary_coordinates, + self.bounds, + self.climatology, + self.coordinates, + self.grid_mappings, + self.labels, + self.cell_measures, + ) + result = set() + for variable in non_data_variables: + result |= set(variable) + return result + def keys(self): """Return the names of all the CF-netCDF variables in the group.""" return self._cf_variables.keys() @@ -1006,22 +1028,26 @@ class CFReader: """ + # All CF variable types EXCEPT for the "special cases" of + # CFDataVariable, CFCoordinateVariable and _CFFormulaTermsVariable. + _variable_types = ( + CFAncillaryDataVariable, + CFAuxiliaryCoordinateVariable, + CFBoundaryVariable, + CFClimatologyVariable, + CFGridMappingVariable, + CFLabelVariable, + CFMeasureVariable, + ) + + # TODO: remove once iris.experimental.ugrid.CFUGridReader is folded in. 
+ CFGroup = CFGroup + def __init__(self, filename, warn=False, monotonic=False): self._filename = os.path.expanduser(filename) - # All CF variable types EXCEPT for the "special cases" of - # CFDataVariable, CFCoordinateVariable and _CFFormulaTermsVariable. - self._variable_types = ( - CFAncillaryDataVariable, - CFAuxiliaryCoordinateVariable, - CFBoundaryVariable, - CFClimatologyVariable, - CFGridMappingVariable, - CFLabelVariable, - CFMeasureVariable, - ) #: Collection of CF-netCDF variables associated with this netCDF file - self.cf_group = CFGroup() + self.cf_group = self.CFGroup() self._dataset = netCDF4.Dataset(self._filename, mode="r") @@ -1096,15 +1122,7 @@ def _translate(self): # Determine the CF data variables. data_variable_names = ( - set(netcdf_variable_names) - - set(self.cf_group.ancillary_variables) - - set(self.cf_group.auxiliary_coordinates) - - set(self.cf_group.bounds) - - set(self.cf_group.climatology) - - set(self.cf_group.coordinates) - - set(self.cf_group.grid_mappings) - - set(self.cf_group.labels) - - set(self.cf_group.cell_measures) + set(netcdf_variable_names) - self.cf_group.non_data_variable_names ) for name in data_variable_names: @@ -1116,17 +1134,28 @@ def _build_cf_groups(self): """Build the first order relationships between CF-netCDF variables.""" def _build(cf_variable): + # TODO: isinstance(cf_variable, UGridMeshVariable) + # UGridMeshVariable currently in experimental.ugrid - circular import. + is_mesh_var = cf_variable.cf_identity == "mesh" + ugrid_coord_names = [] + ugrid_coords = getattr(self.cf_group, "ugrid_coords", None) + if ugrid_coords is not None: + ugrid_coord_names = list(ugrid_coords.keys()) + coordinate_names = list(self.cf_group.coordinates.keys()) - cf_group = CFGroup() + cf_group = self.CFGroup() # Build CF variable relationships. for variable_type in self._variable_types: - # Prevent grid mapping variables being mis-identified as - # CF coordinate variables. 
- if issubclass(variable_type, CFGridMappingVariable): - ignore = None - else: - ignore = coordinate_names + ignore = [] + # Avoid UGridAuxiliaryCoordinateVariables also being + # processed as CFAuxiliaryCoordinateVariables. + if not is_mesh_var: + ignore += ugrid_coord_names + # Prevent grid mapping variables being mis-identified as CF coordinate variables. + if not issubclass(variable_type, CFGridMappingVariable): + ignore += coordinate_names + match = variable_type.identify( self._dataset.variables, ignore=ignore, @@ -1135,7 +1164,8 @@ def _build(cf_variable): ) # Sanity check dimensionality coverage. for cf_name, cf_var in match.items(): - if cf_var.spans(cf_variable): + # No span check is necessary if variable is attached to a mesh. + if is_mesh_var or cf_var.spans(cf_variable): cf_group[cf_name] = self.cf_group[cf_name] else: # Register the ignored variable. diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 14dbab8054..a4727ea624 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -47,6 +47,9 @@ # Show actions activation statistics. DEBUG = False +# Configure the logger. +logger = iris.config.get_logger(__name__) + # Standard CML spatio-temporal axis names. SPATIO_TEMPORAL_AXES = ["t", "z", "y", "x"] @@ -515,6 +518,22 @@ def _set_attributes(attributes, key, value): attributes[str(key)] = value +def _add_unused_attributes(iris_object, cf_var): + """ + Populate the attributes of a cf element with the "unused" attributes + from the associated CF-netCDF variable. That is, all those that aren't CF + reserved terms. + + """ + + def attribute_predicate(item): + return item[0] not in _CF_ATTRS + + tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) + for attr_name, attr_value in tmpvar: + _set_attributes(iris_object.attributes, attr_name, attr_value) + + def _get_actual_dtype(cf_var): # Figure out what the eventual data type will be after any scale/offset # transforms. 
@@ -593,22 +612,12 @@ def _load_cube(engine, cf, cf_var, filename): # It also records various other info on the engine, to be processed later. engine.activate() - # Having run the rules, now populate the attributes of all the cf elements with the - # "unused" attributes from the associated CF-netCDF variable. - # That is, all those that aren't CF reserved terms. - def attribute_predicate(item): - return item[0] not in _CF_ATTRS - - def add_unused_attributes(iris_object, cf_var): - tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) - for attr_name, attr_value in tmpvar: - _set_attributes(iris_object.attributes, attr_name, attr_value) - + # Having run the rules, now add the "unused" attributes to each cf element. def fix_attributes_all_elements(role_name): elements_and_names = engine.cube_parts.get(role_name, []) for iris_object, cf_var_name in elements_and_names: - add_unused_attributes(iris_object, cf.cf_group[cf_var_name]) + _add_unused_attributes(iris_object, cf.cf_group[cf_var_name]) # Populate the attributes of all coordinates, cell-measures and ancillary-vars. fix_attributes_all_elements("coordinates") @@ -616,7 +625,7 @@ def fix_attributes_all_elements(role_name): fix_attributes_all_elements("cell_measures") # Also populate attributes of the top-level cube itself. - add_unused_attributes(cube, cf_var) + _add_unused_attributes(cube, cf_var) # Work out reference names for all the coords. names = { @@ -774,9 +783,18 @@ def load_cubes(filenames, callback=None): Function which can be passed on to :func:`iris.io.run_callback`. Returns: - Generator of loaded NetCDF :class:`iris.cubes.Cube`. + Generator of loaded NetCDF :class:`iris.cube.Cube`. """ + # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded + # into standard behaviour. + # Deferred import to avoid circular imports. 
+ from iris.experimental.ugrid import ( + PARSE_UGRID_ON_LOAD, + CFUGridReader, + _build_mesh, + _build_mesh_coords, + ) from iris.io import run_callback # Create an actions engine. @@ -787,15 +805,53 @@ def load_cubes(filenames, callback=None): for filename in filenames: # Ingest the netCDF file. - cf = iris.fileformats.cf.CFReader(filename) + meshes = {} + if PARSE_UGRID_ON_LOAD: + cf = CFUGridReader(filename) + + # Mesh instances are shared between file phenomena. + # TODO: more sophisticated Mesh sharing between files. + # TODO: access external Mesh cache? + mesh_vars = cf.cf_group.meshes + meshes = { + name: _build_mesh(cf, var, filename) + for name, var in mesh_vars.items() + } + else: + cf = iris.fileformats.cf.CFReader(filename) # Process each CF data variable. data_variables = list(cf.cf_group.data_variables.values()) + list( cf.cf_group.promoted.values() ) for cf_var in data_variables: + # cf_var-specific mesh handling, if a mesh is present. + # Build the mesh_coords *before* loading the cube - avoids + # mesh-related attributes being picked up by + # _add_unused_attributes(). + mesh_name = None + mesh = None + mesh_coords, mesh_dim = [], None + if PARSE_UGRID_ON_LOAD: + mesh_name = getattr(cf_var, "mesh", None) + if mesh_name is not None: + try: + mesh = meshes[mesh_name] + except KeyError: + message = ( + f"File does not contain mesh: '{mesh_name}' - " + f"referenced by variable: '{cf_var.cf_name}' ." + ) + logger.debug(message) + if mesh is not None: + mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var) + cube = _load_cube(engine, cf, cf_var, filename) + # Attach the mesh (if present) to the cube. + for mesh_coord in mesh_coords: + cube.add_aux_coord(mesh_coord, mesh_dim) + # Process any associated formula terms and attach # the corresponding AuxCoordFactory. 
try: @@ -1425,11 +1481,11 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): or cf_var.standard_name != std_name ): # TODO: We need to resolve this corner-case where - # the dimensionless vertical coordinate containing the - # formula_terms is a dimension coordinate of the - # associated cube and a new alternatively named - # dimensionless vertical coordinate is required with - # new formula_terms and a renamed dimension. + # the dimensionless vertical coordinate containing + # the formula_terms is a dimension coordinate of + # the associated cube and a new alternatively named + # dimensionless vertical coordinate is required + # with new formula_terms and a renamed dimension. if cf_name in dimension_names: msg = ( "Unable to create dimensonless vertical " diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 5e1fd4e21e..7b88065d38 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -607,6 +607,42 @@ def assertWarnsRegexp(self, expected_regexp=""): msg = msg.format(expected_regexp) self.assertTrue(matches, msg) + @contextlib.contextmanager + def assertLogs(self, logger=None, level=None, msg_regex=None): + """ + An extended version of the usual :meth:`unittest.TestCase.assertLogs`, + which also exercises the logger's message formatting. + + Also adds the ``msg_regex`` kwarg: + If used, check that the result is a single message of the specified + level, and that it matches this regex. + + The inherited version of this method temporarily *replaces* the logger + in order to capture log records generated within the context. + However, in doing so it prevents any messages from being formatted + by the original logger. + This version first calls the original method, but then *also* exercises + the message formatters of all the logger's handlers, just to check that + there are no formatting errors. + + """ + # Invoke the standard assertLogs behaviour. 
+ assertlogging_context = super().assertLogs(logger, level) + with assertlogging_context as watcher: + # Run the caller context, as per original method. + yield watcher + # Check for any formatting errors by running all the formatters. + for record in watcher.records: + for handler in assertlogging_context.logger.handlers: + handler.format(record) + + # Check message, if requested. + if msg_regex: + self.assertEqual(len(watcher.records), 1) + rec = watcher.records[0] + self.assertEqual(level, rec.levelname) + self.assertRegex(rec.msg, msg_regex) + @contextlib.contextmanager def assertNoWarningsRegexp(self, expected_regexp=""): # Check that no warning matching the given expression is raised. diff --git a/lib/iris/tests/experimental/ugrid/test_ugrid.py b/lib/iris/tests/experimental/ugrid/test_ugrid.py deleted file mode 100644 index c07c6bc7be..0000000000 --- a/lib/iris/tests/experimental/ugrid/test_ugrid.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test the :func:`iris.experimental.ugrid.ugrid` function. - -""" - -import iris.tests as tests # isort:skip - -import unittest - -# Import pyugrid if installed, else fail quietly + disable all the tests. 
-try: - import pyugrid -except (ImportError, AttributeError): - pyugrid = None -skip_pyugrid = unittest.skipIf( - condition=pyugrid is None, - reason="Requires pyugrid, which is not available.", -) - -import iris.experimental.ugrid - -data_path = ( - "NetCDF", - "ugrid", -) -file21 = "21_triangle_example.nc" -long_name = "volume flux between cells" - - -@skip_pyugrid -@tests.skip_data -class TestUgrid(tests.IrisTest): - def test_ugrid(self): - path = tests.get_data_path(data_path + (file21,)) - cube = iris.experimental.ugrid.ugrid(path, long_name) - self.assertTrue(hasattr(cube, "mesh")) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py new file mode 100644 index 0000000000..1503225d6f --- /dev/null +++ b/lib/iris/tests/integration/experimental/test_ugrid_load.py @@ -0,0 +1,183 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Integration tests for NetCDF-UGRID file loading. + +todo: fold these tests into netcdf tests when experimental.ugrid is folded into + standard behaviour. + +""" + +from collections.abc import Iterable + +from iris import Constraint, load +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD, logger + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests +from iris.tests.stock.netcdf import ( + _file_from_cdl_template as create_file_from_cdl_template, +) +from iris.tests.unit.tests.stock.test_netcdf import XIOSFileMixin + + +def ugrid_load(uris, constraints=None, callback=None): + # TODO: remove constraint once files no longer have orphan connectivities. 
+ orphan_connectivities = ( + "Mesh2d_half_levels_edge_face_links", + "Mesh2d_half_levels_face_links", + "Mesh2d_half_levels_face_edges", + "Mesh2d_full_levels_edge_face_links", + "Mesh2d_full_levels_face_links", + "Mesh2d_full_levels_face_edges", + ) + filter_orphan_connectivities = Constraint( + cube_func=lambda cube: cube.var_name not in orphan_connectivities + ) + if constraints is None: + constraints = filter_orphan_connectivities + else: + if not isinstance(constraints, Iterable): + constraints = [constraints] + constraints.append(filter_orphan_connectivities) + + with PARSE_UGRID_ON_LOAD.context(): + return load(uris, constraints, callback) + + +@tests.skip_data +class TestBasic(tests.IrisTest): + def common_test(self, load_filename, assert_filename): + cube_list = ugrid_load( + tests.get_data_path( + ["NetCDF", "unstructured_grid", load_filename] + ), + ) + self.assertEqual(1, len(cube_list)) + cube = cube_list[0] + self.assertCML(cube, ["experimental", "ugrid", assert_filename]) + + def test_2D_1t_face_half_levels(self): + self.common_test( + "lfric_ngvat_2D_1t_face_half_levels_main_conv_rain.nc", + "2D_1t_face_half_levels.cml", + ) + + def test_3D_1t_face_half_levels(self): + self.common_test( + "lfric_ngvat_3D_1t_half_level_face_grid_derived_theta_in_w3.nc", + "3D_1t_face_half_levels.cml", + ) + + def test_3D_1t_face_full_levels(self): + self.common_test( + "lfric_ngvat_3D_1t_full_level_face_grid_main_area_fraction_unit1.nc", + "3D_1t_face_full_levels.cml", + ) + + def test_2D_72t_face_half_levels(self): + self.common_test( + "lfric_ngvat_2D_72t_face_half_levels_main_conv_rain.nc", + "2D_72t_face_half_levels.cml", + ) + + def test_3D_snow_pseudo_levels(self): + self.common_test( + "lfric_ngvat_3D_snow_pseudo_levels_1t_face_half_levels_main_snow_layer_temp.nc", + "3D_snow_pseudo_levels.cml", + ) + + def test_3D_soil_pseudo_levels(self): + self.common_test( + "lfric_ngvat_3D_soil_pseudo_levels_1t_face_half_levels_main_soil_temperature.nc", + 
"3D_soil_pseudo_levels.cml", + ) + + def test_3D_tile_pseudo_levels(self): + self.common_test( + "lfric_ngvat_3D_tile_pseudo_levels_1t_face_half_levels_main_sw_up_tile.nc", + "3D_tile_pseudo_levels.cml", + ) + + def test_3D_veg_pseudo_levels(self): + self.common_test( + "lfric_ngvat_3D_veg_pseudo_levels_1t_face_half_levels_main_snowpack_density.nc", + "3D_veg_pseudo_levels.cml", + ) + + +@tests.skip_data +class TestMultiplePhenomena(tests.IrisTest): + def test_multiple_phenomena(self): + cube_list = ugrid_load( + tests.get_data_path( + ["NetCDF", "unstructured_grid", "lfric_surface_mean.nc"] + ), + ) + self.assertCML( + cube_list, ("experimental", "ugrid", "surface_mean.cml") + ) + + +class TestTolerantLoading(XIOSFileMixin): + # N.B. using parts of the XIOS-like file integration testing, to make + # temporary netcdf files from stored CDL templates. + @classmethod + def setUpClass(cls): + super().setUpClass() # create cls.temp_dir = dir for test files + + @classmethod + def tearDownClass(cls): + super().setUpClass() # destroy temp dir + + # Create a testfile according to testcase-specific arguments. + # NOTE: with this, parent "create_synthetic_test_cube" can load a cube. + def create_synthetic_file(self, **create_kwargs): + template_name = create_kwargs["template"] # required kwarg + testfile_name = "tmp_netcdf" + template_subs = dict( + NUM_NODES=7, NUM_FACES=3, DATASET_NAME=testfile_name + ) + kwarg_subs = create_kwargs.get("subs", {}) # optional kwarg + template_subs.update(kwarg_subs) + filepath = create_file_from_cdl_template( + temp_file_dir=self.temp_dir, + dataset_name=testfile_name, + dataset_type=template_name, + template_subs=template_subs, + ) + return str(filepath) # N.B. Path object not usable in iris.load + + def test_mesh_bad_topology_dimension(self): + # Check that the load generates a suitable warning. 
+ log_regex = r"topology_dimension.* ignoring" + with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): + template = "minimal_bad_topology_dim" + dim_line = "mesh_var:topology_dimension = 1 ;" # which is wrong ! + cube = self.create_synthetic_test_cube( + template=template, subs=dict(TOPOLOGY_DIM_DEFINITION=dim_line) + ) + + # Check that the result has topology-dimension of 2 (not 1). + self.assertEqual(cube.mesh.topology_dimension, 2) + + def test_mesh_no_topology_dimension(self): + # Check that the load generates a suitable warning. + log_regex = r"Mesh variable.* has no 'topology_dimension'" + with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): + template = "minimal_bad_topology_dim" + dim_line = "" # don't create ANY topology_dimension property + cube = self.create_synthetic_test_cube( + template=template, subs=dict(TOPOLOGY_DIM_DEFINITION=dim_line) + ) + + # Check that the result has the correct topology-dimension value. + self.assertEqual(cube.mesh.topology_dimension, 2) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml new file mode 100644 index 0000000000..be79f3ff57 --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml new file mode 100644 index 0000000000..568c835e97 --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml new file 
mode 100644 index 0000000000..6d7873daaa --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml new file mode 100644 index 0000000000..b664e3cf6f --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml new file mode 100644 index 0000000000..b30d443495 --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml new file mode 100644 index 0000000000..157755298d --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml new file mode 100644 index 0000000000..a9eba1a80d --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml new file mode 100644 index 0000000000..e90c048803 --- /dev/null +++ 
b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml new file mode 100644 index 0000000000..368b3508e3 --- /dev/null +++ b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml @@ -0,0 +1,921 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index 5cfacdc794..a46a5510f6 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -4,7 +4,7 @@ # See 
COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -A collection of routines which create standard Cubes for test purposes. +A collection of routines which create standard Cubes/files for test purposes. """ import iris.tests as tests # isort:skip diff --git a/lib/iris/tests/stock/file_headers/README.md b/lib/iris/tests/stock/file_headers/README.md new file mode 100644 index 0000000000..e725b17027 --- /dev/null +++ b/lib/iris/tests/stock/file_headers/README.md @@ -0,0 +1,6 @@ +A directory of text files containing file header strings that include +placeholders - designed to be interpreted by +[Python's string.Template](https://docs.python.org/3/library/string.html#string.Template). + +* `.cdl` files: used by [`tests.stock.netcdf`](../netcdf.py) to create +synthetic NetCDF files of the required dimensions. diff --git a/lib/iris/tests/stock/file_headers/minimal_bad_topology_dim.cdl b/lib/iris/tests/stock/file_headers/minimal_bad_topology_dim.cdl new file mode 100644 index 0000000000..44f3ef18f8 --- /dev/null +++ b/lib/iris/tests/stock/file_headers/minimal_bad_topology_dim.cdl @@ -0,0 +1,38 @@ +// Tolerant loading test example : the mesh has the wrong 'topology_dimension' +// NOTE: *not* truly minimal, as we cannot (yet) handle data with no face coords. +netcdf ${DATASET_NAME} { +dimensions: + NODES = ${NUM_NODES} ; + FACES = ${NUM_FACES} ; + FACE_CORNERS = 4 ; +variables: + int mesh_var ; + mesh_var:cf_role = "mesh_topology" ; + ${TOPOLOGY_DIM_DEFINITION} + mesh_var:node_coordinates = "mesh_node_x mesh_node_y" ; + mesh_var:face_node_connectivity = "mesh_face_nodes" ; + mesh_var:face_coordinates = "mesh_face_x mesh_face_y" ; + float mesh_node_x(NODES) ; + mesh_node_x:standard_name = "longitude" ; + mesh_node_x:long_name = "Longitude of mesh nodes." ; + mesh_node_x:units = "degrees_east" ; + float mesh_node_y(NODES) ; + mesh_node_y:standard_name = "latitude" ; + mesh_node_y:long_name = "Latitude of mesh nodes." 
; + mesh_node_y:units = "degrees_north" ; + float mesh_face_x(FACES) ; + mesh_face_x:standard_name = "longitude" ; + mesh_face_x:long_name = "Longitude of mesh nodes." ; + mesh_face_x:units = "degrees_east" ; + float mesh_face_y(FACES) ; + mesh_face_y:standard_name = "latitude" ; + mesh_face_y:long_name = "Latitude of mesh nodes." ; + mesh_face_y:units = "degrees_north" ; + int mesh_face_nodes(FACES, FACE_CORNERS) ; + mesh_face_nodes:cf_role = "face_node_connectivity" ; + mesh_face_nodes:long_name = "Maps every face to its corner nodes." ; + mesh_face_nodes:start_index = 0 ; + float data_var(FACES) ; + data_var:mesh = "mesh_var" ; + data_var:location = "face" ; +} diff --git a/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl b/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl new file mode 100644 index 0000000000..b135546f2d --- /dev/null +++ b/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl @@ -0,0 +1,58 @@ +// For now: have omitted all optional information (on edges + connectivity), +// *except* for face locations. +netcdf ${DATASET_NAME} { +dimensions: + axis_nbounds = 2 ; + Two = 2 ; + nMesh2d_half_levels_node = ${NUM_NODES} ; + nMesh2d_half_levels_face = ${NUM_FACES} ; + nMesh2d_half_levels_vertex = 4 ; + time_counter = UNLIMITED ; // (1 currently) +variables: + int Mesh2d_half_levels ; + Mesh2d_half_levels:cf_role = "mesh_topology" ; + Mesh2d_half_levels:long_name = "Topology data of 2D unstructured mesh" ; + Mesh2d_half_levels:topology_dimension = 2 ; + Mesh2d_half_levels:node_coordinates = "Mesh2d_half_levels_node_x Mesh2d_half_levels_node_y" ; + Mesh2d_half_levels:face_coordinates = "Mesh2d_half_levels_face_x Mesh2d_half_levels_face_y" ; + Mesh2d_half_levels:face_node_connectivity = "Mesh2d_half_levels_face_nodes" ; + float Mesh2d_half_levels_node_x(nMesh2d_half_levels_node) ; + Mesh2d_half_levels_node_x:standard_name = "longitude" ; + Mesh2d_half_levels_node_x:long_name = "Longitude of mesh nodes." 
; + Mesh2d_half_levels_node_x:units = "degrees_east" ; + float Mesh2d_half_levels_node_y(nMesh2d_half_levels_node) ; + Mesh2d_half_levels_node_y:standard_name = "latitude" ; + Mesh2d_half_levels_node_y:long_name = "Latitude of mesh nodes." ; + Mesh2d_half_levels_node_y:units = "degrees_north" ; + float Mesh2d_half_levels_face_x(nMesh2d_half_levels_face) ; + Mesh2d_half_levels_face_x:standard_name = "longitude" ; + Mesh2d_half_levels_face_x:long_name = "Characteristic longitude of mesh faces." ; + Mesh2d_half_levels_face_x:units = "degrees_east" ; + float Mesh2d_half_levels_face_y(nMesh2d_half_levels_face) ; + Mesh2d_half_levels_face_y:standard_name = "latitude" ; + Mesh2d_half_levels_face_y:long_name = "Characteristic latitude of mesh faces." ; + Mesh2d_half_levels_face_y:units = "degrees_north" ; + int Mesh2d_half_levels_face_nodes(nMesh2d_half_levels_face, nMesh2d_half_levels_vertex) ; + Mesh2d_half_levels_face_nodes:cf_role = "face_node_connectivity" ; + Mesh2d_half_levels_face_nodes:long_name = "Maps every face to its corner nodes." ; + Mesh2d_half_levels_face_nodes:start_index = 0 ; + double time_instant(time_counter) ; + time_instant:standard_name = "time" ; + time_instant:long_name = "Time axis" ; + time_instant:calendar = "gregorian" ; + time_instant:units = "seconds since 2016-01-01 15:00:00" ; + time_instant:time_origin = "2016-01-01 15:00:00" ; + time_instant:bounds = "time_instant_bounds" ; + double time_instant_bounds(time_counter, axis_nbounds) ; + double thing(time_counter, nMesh2d_half_levels_face) ; +// Fictional phenomenon to simplify and avoid un-realistic data/units in the required file. 
+ thing:long_name = "thingness" ; + thing:mesh = "Mesh2d_half_levels" ; + thing:location = "face" ; + thing:coordinates = "time_instant Mesh2d_half_levels_face_y Mesh2d_half_levels_face_x" ; + +// global attributes: + :name = "${DATASET_NAME}" ; +// original name = "lfric_ngvat_2D_1t_face_half_levels_main_conv_rain" + :Conventions = "UGRID" ; +} diff --git a/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl b/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl new file mode 100644 index 0000000000..e4f32de7b7 --- /dev/null +++ b/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl @@ -0,0 +1,61 @@ +// For now: have omitted all optional information (on edges + connectivity), +// *except* for face locations. +netcdf ${DATASET_NAME} { +dimensions: + axis_nbounds = 2 ; + Two = 2 ; + nMesh2d_full_levels_node = ${NUM_NODES} ; + nMesh2d_full_levels_face = ${NUM_FACES} ; + nMesh2d_full_levels_vertex = 4 ; + full_levels = ${NUM_LEVELS} ; + time_counter = UNLIMITED ; // (1 currently) +variables: + int Mesh2d_full_levels ; + Mesh2d_full_levels:cf_role = "mesh_topology" ; + Mesh2d_full_levels:long_name = "Topology data of 2D unstructured mesh" ; + Mesh2d_full_levels:topology_dimension = 2 ; + Mesh2d_full_levels:node_coordinates = "Mesh2d_full_levels_node_x Mesh2d_full_levels_node_y" ; + Mesh2d_full_levels:face_coordinates = "Mesh2d_full_levels_face_x Mesh2d_full_levels_face_y" ; + Mesh2d_full_levels:face_node_connectivity = "Mesh2d_full_levels_face_nodes" ; + float Mesh2d_full_levels_node_x(nMesh2d_full_levels_node) ; + Mesh2d_full_levels_node_x:standard_name = "longitude" ; + Mesh2d_full_levels_node_x:long_name = "Longitude of mesh nodes." ; + Mesh2d_full_levels_node_x:units = "degrees_east" ; + float Mesh2d_full_levels_node_y(nMesh2d_full_levels_node) ; + Mesh2d_full_levels_node_y:standard_name = "latitude" ; + Mesh2d_full_levels_node_y:long_name = "Latitude of mesh nodes." 
; + Mesh2d_full_levels_node_y:units = "degrees_north" ; + float Mesh2d_full_levels_face_x(nMesh2d_full_levels_face) ; + Mesh2d_full_levels_face_x:standard_name = "longitude" ; + Mesh2d_full_levels_face_x:long_name = "Characteristic longitude of mesh faces." ; + Mesh2d_full_levels_face_x:units = "degrees_east" ; + float Mesh2d_full_levels_face_y(nMesh2d_full_levels_face) ; + Mesh2d_full_levels_face_y:standard_name = "latitude" ; + Mesh2d_full_levels_face_y:long_name = "Characteristic latitude of mesh faces." ; + Mesh2d_full_levels_face_y:units = "degrees_north" ; + int Mesh2d_full_levels_face_nodes(nMesh2d_full_levels_face, nMesh2d_full_levels_vertex) ; + Mesh2d_full_levels_face_nodes:cf_role = "face_node_connectivity" ; + Mesh2d_full_levels_face_nodes:long_name = "Maps every face to its corner nodes." ; + Mesh2d_full_levels_face_nodes:start_index = 0 ; + float full_levels(full_levels) ; + full_levels:name = "full_levels" ; + double time_instant(time_counter) ; + time_instant:standard_name = "time" ; + time_instant:long_name = "Time axis" ; + time_instant:calendar = "gregorian" ; + time_instant:units = "seconds since 2016-01-01 15:00:00" ; + time_instant:time_origin = "2016-01-01 15:00:00" ; + time_instant:bounds = "time_instant_bounds" ; + double time_instant_bounds(time_counter, axis_nbounds) ; + double thing(time_counter, full_levels, nMesh2d_full_levels_face) ; +// Fictional phenomenon to simplify and avoid un-realistic data/units in the required file. 
+ thing:long_name = "thingness" ; + thing:mesh = "Mesh2d_full_levels" ; + thing:location = "face" ; + thing:coordinates = "time_instant Mesh2d_full_levels_face_y Mesh2d_full_levels_face_x" ; + +// global attributes: + :name = "${DATASET_NAME}" ; +// original name = "lfric_ngvat_3D_1t_full_level_face_grid_main_u3" + :Conventions = "UGRID" ; +} diff --git a/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl b/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl new file mode 100644 index 0000000000..a193dbe451 --- /dev/null +++ b/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl @@ -0,0 +1,61 @@ +// For now: have omitted all optional information (on edges + connectivity), +// *except* for face locations. +netcdf ${DATASET_NAME} { +dimensions: + axis_nbounds = 2 ; + Two = 2 ; + nMesh2d_half_levels_node = ${NUM_NODES} ; + nMesh2d_half_levels_face = ${NUM_FACES} ; + nMesh2d_half_levels_vertex = 4 ; + half_levels = ${NUM_LEVELS} ; + time_counter = UNLIMITED ; // (1 currently) +variables: + int Mesh2d_half_levels ; + Mesh2d_half_levels:cf_role = "mesh_topology" ; + Mesh2d_half_levels:long_name = "Topology data of 2D unstructured mesh" ; + Mesh2d_half_levels:topology_dimension = 2 ; + Mesh2d_half_levels:node_coordinates = "Mesh2d_half_levels_node_x Mesh2d_half_levels_node_y" ; + Mesh2d_half_levels:face_coordinates = "Mesh2d_half_levels_face_x Mesh2d_half_levels_face_y" ; + Mesh2d_half_levels:face_node_connectivity = "Mesh2d_half_levels_face_nodes" ; + float Mesh2d_half_levels_node_x(nMesh2d_half_levels_node) ; + Mesh2d_half_levels_node_x:standard_name = "longitude" ; + Mesh2d_half_levels_node_x:long_name = "Longitude of mesh nodes." ; + Mesh2d_half_levels_node_x:units = "degrees_east" ; + float Mesh2d_half_levels_node_y(nMesh2d_half_levels_node) ; + Mesh2d_half_levels_node_y:standard_name = "latitude" ; + Mesh2d_half_levels_node_y:long_name = "Latitude of mesh nodes." 
; + Mesh2d_half_levels_node_y:units = "degrees_north" ; + float Mesh2d_half_levels_face_x(nMesh2d_half_levels_face) ; + Mesh2d_half_levels_face_x:standard_name = "longitude" ; + Mesh2d_half_levels_face_x:long_name = "Characteristic longitude of mesh faces." ; + Mesh2d_half_levels_face_x:units = "degrees_east" ; + float Mesh2d_half_levels_face_y(nMesh2d_half_levels_face) ; + Mesh2d_half_levels_face_y:standard_name = "latitude" ; + Mesh2d_half_levels_face_y:long_name = "Characteristic latitude of mesh faces." ; + Mesh2d_half_levels_face_y:units = "degrees_north" ; + int Mesh2d_half_levels_face_nodes(nMesh2d_half_levels_face, nMesh2d_half_levels_vertex) ; + Mesh2d_half_levels_face_nodes:cf_role = "face_node_connectivity" ; + Mesh2d_half_levels_face_nodes:long_name = "Maps every face to its corner nodes." ; + Mesh2d_half_levels_face_nodes:start_index = 0 ; + float half_levels(half_levels) ; + half_levels:name = "half_levels" ; + double time_instant(time_counter) ; + time_instant:standard_name = "time" ; + time_instant:long_name = "Time axis" ; + time_instant:calendar = "gregorian" ; + time_instant:units = "seconds since 2016-01-01 15:00:00" ; + time_instant:time_origin = "2016-01-01 15:00:00" ; + time_instant:bounds = "time_instant_bounds" ; + double time_instant_bounds(time_counter, axis_nbounds) ; + double thing(time_counter, half_levels, nMesh2d_half_levels_face) ; +// Fictional phenomenon to simplify and avoid un-realistic data/units in the required file. 
+ thing:long_name = "thingness" ; + thing:mesh = "Mesh2d_half_levels" ; + thing:location = "face" ; + thing:coordinates = "time_instant Mesh2d_half_levels_face_y Mesh2d_half_levels_face_x" ; + +// global attributes: + :name = "${DATASET_NAME}" ; +// original name = "lfric_ngvat_3D_1t_half_level_face_grid_derived_theta_in_w3" + :Conventions = "UGRID" ; +} diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py new file mode 100644 index 0000000000..f3ce783fad --- /dev/null +++ b/lib/iris/tests/stock/mesh.py @@ -0,0 +1,159 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Helper functions making objects for unstructured mesh testing.""" + + +import numpy as np + +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube +from iris.experimental.ugrid import Connectivity, Mesh, MeshCoord + +# Default creation controls for creating a test Mesh. +# Note: we're not creating any kind of sensible 'normal' mesh here, the numbers +# of nodes/faces/edges are quite arbitrary and the connectivities we generate +# are pretty random too. +_TEST_N_NODES = 15 +_TEST_N_FACES = 3 +_TEST_N_EDGES = 5 +_TEST_N_BOUNDS = 4 + + +def sample_mesh(n_nodes=None, n_faces=None, n_edges=None): + """ + Make a test mesh. + + Mesh has faces edges, face-coords and edge-coords, numbers of which can be controlled. + + """ + if n_nodes is None: + n_nodes = _TEST_N_NODES + if n_faces is None: + n_faces = _TEST_N_FACES + if n_edges is None: + n_edges = _TEST_N_EDGES + node_x = AuxCoord( + 1100 + np.arange(n_nodes), + standard_name="longitude", + units="degrees_east", + long_name="long-name", + var_name="var-name", + attributes={"a": 1, "b": "c"}, + ) + node_y = AuxCoord(1200 + np.arange(n_nodes), standard_name="latitude") + + # Define a rather arbitrary edge-nodes connectivity. 
+ # Some nodes are left out, because n_edges*2 < n_nodes. + conns = np.arange(n_edges * 2, dtype=int) + # Missing nodes include #0-5, because we add 5. + conns = ((conns + 5) % n_nodes).reshape((n_edges, 2)) + edge_nodes = Connectivity(conns, cf_role="edge_node_connectivity") + conns = np.arange(n_edges * 2, dtype=int) + + # Some numbers for the edge coordinates. + edge_x = AuxCoord(2100 + np.arange(n_edges), standard_name="longitude") + edge_y = AuxCoord(2200 + np.arange(n_edges), standard_name="latitude") + + # Define a rather arbitrary face-nodes connectivity. + # Some nodes are left out, because n_faces*n_bounds < n_nodes. + conns = np.arange(n_faces * _TEST_N_BOUNDS, dtype=int) + conns = (conns % n_nodes).reshape((n_faces, _TEST_N_BOUNDS)) + face_nodes = Connectivity(conns, cf_role="face_node_connectivity") + + # Some numbers for the edge coordinates. + face_x = AuxCoord(3100 + np.arange(n_faces), standard_name="longitude") + face_y = AuxCoord(3200 + np.arange(n_faces), standard_name="latitude") + + mesh = Mesh( + topology_dimension=2, + node_coords_and_axes=[(node_x, "x"), (node_y, "y")], + connectivities=[face_nodes, edge_nodes], + edge_coords_and_axes=[(edge_x, "x"), (edge_y, "y")], + face_coords_and_axes=[(face_x, "x"), (face_y, "y")], + ) + return mesh + + +def sample_meshcoord(mesh=None, location="face", axis="x", **extra_kwargs): + """ + Create a test MeshCoord. + + The creation args are defaulted, including the mesh. + If not provided as an arg, a new mesh is created with sample_mesh(). + + """ + if mesh is None: + mesh = sample_mesh() + result = MeshCoord(mesh=mesh, location=location, axis=axis, **extra_kwargs) + return result + + +def sample_mesh_cube( + nomesh=False, n_z=2, with_parts=False, **meshcoord_kwargs +): + """ + Create a 2d test cube with 1 'normal' and 1 unstructured dimension (with a Mesh). + + Result contains : dimcoords for both dims; an auxcoord on the unstructured dim; 2 mesh-coords. 
+ By default, the mesh is provided by :func:`sample_mesh`, so coordinates and connectivity are not realistic. + + Kwargs: + * nomesh(bool): + If set, don't add MeshCoords, so dim 1 is just a plain anonymous dim. + * n_z (int): + Length of the 'normal' dim. If 0, it is *omitted*. + * with_parts (bool): + If set, return all the constituent component coords + * meshcoord_kwargs (dict): + Extra controls passed to :func:`sample_meshcoord` for MeshCoord creation, to allow user-specified + location/mesh. The 'axis' key is not available, as we always add both an 'x' and 'y' MeshCOord. + + Returns: + * cube : if with_parts not set + * (cube, parts) : if with_parts is set + 'parts' is (mesh, dim0-dimcoord, dim1-dimcoord, dim1-auxcoord, x-meshcoord [or None], y-meshcoord [or None]). + + """ + if nomesh: + mesh = None + n_faces = 5 + else: + mesh = meshcoord_kwargs.pop("mesh", None) + if mesh is None: + mesh = sample_mesh() + meshx, meshy = ( + sample_meshcoord(axis=axis, mesh=mesh, **meshcoord_kwargs) + for axis in ("x", "y") + ) + n_faces = meshx.shape[0] + + mesh_dimco = DimCoord( + np.arange(n_faces), long_name="i_mesh_face", units="1" + ) + + auxco_x = AuxCoord(np.zeros(n_faces), long_name="mesh_face_aux", units="1") + + zco = DimCoord(np.arange(n_z), long_name="level", units=1) + cube = Cube(np.zeros((n_z, n_faces)), long_name="mesh_phenom") + cube.add_dim_coord(zco, 0) + if nomesh: + mesh_coords = [] + else: + mesh_coords = [meshx, meshy] + + cube.add_dim_coord(mesh_dimco, 1) + for co in mesh_coords + [auxco_x]: + cube.add_aux_coord(co, 1) + + if not with_parts: + result = cube + else: + if nomesh: + meshx, meshy = None, None + parts = (mesh, zco, mesh_dimco, auxco_x, meshx, meshy) + result = (cube, parts) + + return result diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py new file mode 100644 index 0000000000..78dc08eafd --- /dev/null +++ b/lib/iris/tests/stock/netcdf.py @@ -0,0 +1,215 @@ +# Copyright Iris contributors +# +# This file is part 
of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Routines for generating synthetic NetCDF files from template headers.""" + +from pathlib import Path +from string import Template +import subprocess + +import netCDF4 +import numpy as np + + +def _file_from_cdl_template( + temp_file_dir, dataset_name, dataset_type, template_subs +): + """Shared template filling behaviour. + + Substitutes placeholders in the appropriate CDL template, saves to a + NetCDF file. + + """ + nc_write_path = ( + Path(temp_file_dir).joinpath(dataset_name).with_suffix(".nc") + ) + # Fetch the specified CDL template type. + templates_dir = Path(__file__).parent / "file_headers" + template_filepath = templates_dir.joinpath(dataset_type).with_suffix( + ".cdl" + ) + # Substitute placeholders. + with open(template_filepath) as file: + template_string = Template(file.read()) + cdl = template_string.substitute(template_subs) + + # Spawn an "ncgen" command to create an actual NetCDF file from the + # CDL string. + subprocess.run( + ["ncgen", "-o" + str(nc_write_path)], + input=cdl, + encoding="ascii", + check=True, + ) + + return nc_write_path + + +def _add_standard_data(nc_path, unlimited_dim_size=0): + """Shared data populating behaviour. + + Adds placeholder data to the variables in a NetCDF file, accounting for + dimension size, 'dimension coordinates' and a possible unlimited dimension. + + """ + + ds = netCDF4.Dataset(nc_path, "r+") + + unlimited_dim_names = [ + dim for dim in ds.dimensions if ds.dimensions[dim].size == 0 + ] + # Data addition dependent on this assumption: + assert len(unlimited_dim_names) < 2 + + # Fill variables data with placeholder numbers. + for var in ds.variables.values(): + shape = list(var.shape) + dims = var.dimensions + # Fill the unlimited dimension with the input size. 
+ shape = [ + unlimited_dim_size if dim == unlimited_dim_names[0] else size + for dim, size in zip(dims, shape) + ] + data = np.zeros(shape, dtype=var.dtype) + if len(var.dimensions) == 1 and var.dimensions[0] == var.name: + # Fill the var with ascending values (not all zeroes), + # so it can be a dim-coord. + data = np.arange(data.size, dtype=data.dtype).reshape(data.shape) + var[:] = data + + ds.close() + + +def create_file__xios_2d_face_half_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1 +): + """Create a synthetic NetCDF file with XIOS-like content. + + Starts from a template CDL headers string, modifies to the input + dimensions then adds data of the correct size. + + Parameters + ---------- + temp_file_dir : str or pathlib.Path + The directory in which to place the created file. + dataset_name : str + The name for the NetCDF dataset and also the created file. + n_faces, n_times: int + Dimension sizes for the dataset. + + Returns + ------- + str + Path of the created NetCDF file. + + """ + dataset_type = "xios_2D_face_half_levels" + + # Set the placeholder substitutions. + template_subs = { + "DATASET_NAME": dataset_name, + "NUM_NODES": n_faces + 2, + "NUM_FACES": n_faces, + } + + # Create a NetCDF file based on the dataset type template and substitutions. + nc_path = _file_from_cdl_template( + temp_file_dir, dataset_name, dataset_type, template_subs + ) + + # Populate with the standard set of data, sized correctly. + _add_standard_data(nc_path, unlimited_dim_size=n_times) + + return str(nc_path) + + +def create_file__xios_3d_face_half_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38 +): + """Create a synthetic NetCDF file with XIOS-like content. + + Starts from a template CDL headers string, modifies to the input + dimensions then adds data of the correct size. + + Parameters + ---------- + temp_file_dir : str or pathlib.Path + The directory in which to place the created file. 
+ dataset_name : str + The name for the NetCDF dataset and also the created file. + n_faces, n_times, n_levels: int + Dimension sizes for the dataset. + + Returns + ------- + str + Path of the created NetCDF file. + + """ + dataset_type = "xios_3D_face_half_levels" + + # Set the placeholder substitutions. + template_subs = { + "DATASET_NAME": dataset_name, + "NUM_NODES": n_faces + 2, + "NUM_FACES": n_faces, + "NUM_LEVELS": n_levels, + } + + # Create a NetCDF file based on the dataset type template and + # substitutions. + nc_path = _file_from_cdl_template( + temp_file_dir, dataset_name, dataset_type, template_subs + ) + + # Populate with the standard set of data, sized correctly. + _add_standard_data(nc_path, unlimited_dim_size=n_times) + + return str(nc_path) + + +def create_file__xios_3d_face_full_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=39 +): + """Create a synthetic NetCDF file with XIOS-like content. + + Starts from a template CDL headers string, modifies to the input + dimensions then adds data of the correct size. + + Parameters + ---------- + temp_file_dir : str or pathlib.Path + The directory in which to place the created file. + dataset_name : str + The name for the NetCDF dataset and also the created file. + n_faces, n_times, n_levels: int + Dimension sizes for the dataset. + + Returns + ------- + str + Path of the created NetCDF file. + + """ + dataset_type = "xios_3D_face_full_levels" + + # Set the placeholder substitutions. + template_subs = { + "DATASET_NAME": dataset_name, + "NUM_NODES": n_faces + 2, + "NUM_FACES": n_faces, + "NUM_LEVELS": n_levels, + } + + # Create a NetCDF file based on the dataset type template and + # substitutions. + nc_path = _file_from_cdl_template( + temp_file_dir, dataset_name, dataset_type, template_subs + ) + + # Populate with the standard set of data, sized correctly. 
+ _add_standard_data(nc_path, unlimited_dim_size=n_times) + + return str(nc_path) diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 36e06202d1..969d987af3 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -16,11 +16,9 @@ import os.path import shutil import stat -from subprocess import check_call import tempfile from unittest import mock -from cf_units import as_unit import netCDF4 as nc import numpy as np import numpy.ma as ma @@ -29,10 +27,8 @@ from iris._lazy_data import is_lazy_data import iris.analysis.trajectory import iris.coord_systems as icoord_systems -from iris.coords import AncillaryVariable, CellMeasure from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf -from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names import iris.tests.stock as stock import iris.util @@ -251,153 +247,6 @@ def test_cell_methods(self): self.assertCML(cubes, ("netcdf", "netcdf_cell_methods.cml")) - def test_ancillary_variables(self): - # Note: using a CDL string as a test data reference, rather than a binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - variables: - int64 qqv(axv) ; - qqv:long_name = "qq" ; - qqv:units = "1" ; - qqv:ancillary_variables = "my_av" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - double my_av(axv) ; - my_av:units = "1" ; - my_av:long_name = "refs" ; - my_av:custom = "extra-attribute"; - data: - axv = 1, 2, 3; - my_av = 11., 12., 13.; - } - """ - self.tmpdir = tempfile.mkdtemp() - cdl_path = os.path.join(self.tmpdir, "tst.cdl") - nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. 
- command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. - cubes = list(nc_load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) - expected = AncillaryVariable( - np.ma.array([11.0, 12.0, 13.0]), - long_name="refs", - var_name="my_av", - units="1", - attributes={"custom": "extra-attribute"}, - ) - self.assertEqual(avs[0], expected) - - def test_status_flags(self): - # Note: using a CDL string as a test data reference, rather than a binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - variables: - int64 qqv(axv) ; - qqv:long_name = "qq" ; - qqv:units = "1" ; - qqv:ancillary_variables = "my_av" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - byte my_av(axv) ; - my_av:long_name = "qq status_flag" ; - my_av:flag_values = 1b, 2b ; - my_av:flag_meanings = "a b" ; - data: - axv = 11, 21, 31; - my_av = 1b, 1b, 2b; - } - """ - self.tmpdir = tempfile.mkdtemp() - cdl_path = os.path.join(self.tmpdir, "tst.cdl") - nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
- cubes = list(nc_load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) - expected = AncillaryVariable( - np.ma.array([1, 1, 2], dtype=np.int8), - long_name="qq status_flag", - var_name="my_av", - units="no_unit", - attributes={ - "flag_values": np.array([1, 2], dtype=np.int8), - "flag_meanings": "a b", - }, - ) - self.assertEqual(avs[0], expected) - - def test_cell_measures(self): - # Note: using a CDL string as a test data reference, rather than a binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - ayv = 2 ; - variables: - int64 qqv(ayv, axv) ; - qqv:long_name = "qq" ; - qqv:units = "1" ; - qqv:cell_measures = "area: my_areas" ; - int64 ayv(ayv) ; - ayv:units = "1" ; - ayv:long_name = "y" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - double my_areas(ayv, axv) ; - my_areas:units = "m2" ; - my_areas:long_name = "standardised cell areas" ; - my_areas:custom = "extra-attribute"; - data: - axv = 11, 12, 13; - ayv = 21, 22; - my_areas = 110., 120., 130., 221., 231., 241.; - } - """ - self.tmpdir = tempfile.mkdtemp() - cdl_path = os.path.join(self.tmpdir, "tst.cdl") - nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
- cubes = list(nc_load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - cms = cubes[0].cell_measures() - self.assertEqual(len(cms), 1) - expected = CellMeasure( - np.ma.array([[110.0, 120.0, 130.0], [221.0, 231.0, 241.0]]), - measure="area", - var_name="my_areas", - long_name="standardised cell areas", - units="m2", - attributes={"custom": "extra-attribute"}, - ) - self.assertEqual(cms[0], expected) - def test_deferred_loading(self): # Test exercising CF-netCDF deferred loading and deferred slicing. # shape (31, 161, 320) @@ -450,55 +299,6 @@ def test_deferred_loading(self): cube[0][(0, 2), (1, 3)], ("netcdf", "netcdf_deferred_mix_1.cml") ) - def test_default_units(self): - # Note: using a CDL string as a test data reference, rather than a binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - ayv = 2 ; - variables: - int64 qqv(ayv, axv) ; - qqv:long_name = "qq" ; - qqv:ancillary_variables = "my_av" ; - qqv:cell_measures = "area: my_areas" ; - int64 ayv(ayv) ; - ayv:long_name = "y" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - double my_av(axv) ; - my_av:long_name = "refs" ; - double my_areas(ayv, axv) ; - my_areas:long_name = "areas" ; - data: - axv = 11, 12, 13; - ayv = 21, 22; - my_areas = 110., 120., 130., 221., 231., 241.; - } - """ - self.tmpdir = tempfile.mkdtemp() - cdl_path = os.path.join(self.tmpdir, "tst.cdl") - nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
- cubes = list(nc_load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("x").units, as_unit(1)) - self.assertEqual( - cubes[0].ancillary_variable("refs").units, as_unit("unknown") - ) - self.assertEqual( - cubes[0].cell_measure("areas").units, as_unit("unknown") - ) - def test_units(self): # Test exercising graceful cube and coordinate units loading. cube0, cube1 = sorted( diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index 416cc52f19..62e2b24891 100644 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -39,10 +39,10 @@ def setUp(self): self.lenient = Lenient() def test_in(self): - self.assertTrue("maths", self.lenient) + self.assertIn("maths", self.lenient) def test_not_in(self): - self.assertTrue(("concatenate", self.lenient)) + self.assertNotIn("concatenate", self.lenient) class Test___getitem__(tests.IrisTest): @@ -180,3 +180,7 @@ def test_maths_enable__lenient_false(self): # still synchronised self.assertFalse(_LENIENT.enable) self.assertFalse(self.lenient["maths"]) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py index 33b08744ec..a434651206 100644 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py @@ -315,8 +315,8 @@ def test_op_lenient_same_measure_none(self): expected = right.copy() with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertTrue(expected, rmetadata.combine(lmetadata)._asdict()) + self.assertEqual(expected, 
lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_lenient_different(self): lmetadata = self.cls(**self.values) diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 1b3b196cab..848431565b 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -339,8 +339,8 @@ def test_op_lenient_same_cell_methods_none(self): expected = right.copy() with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertTrue(expected, rmetadata.combine(lmetadata)._asdict()) + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) def test_op_lenient_different(self): lmetadata = self.cls(**self.values) diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py new file mode 100644 index 0000000000..9c5987f235 --- /dev/null +++ b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py @@ -0,0 +1,136 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :func:`iris.common.metadata_filter`. + +""" + +import numpy as np + +from iris.common.metadata import ( + CoordMetadata, + DimCoordMetadata, + metadata_filter, +) +from iris.coords import AuxCoord + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + +Mock = tests.mock.Mock + + +class Test_standard(tests.IrisTest): + def test_instances_non_iterable(self): + item = Mock() + item.name.return_value = "one" + result = metadata_filter(item, item="one") + self.assertEqual(1, len(result)) + self.assertIn(item, result) + + def test_name(self): + name_one = Mock() + name_one.name.return_value = "one" + name_two = Mock() + name_two.name.return_value = "two" + input_list = [name_one, name_two] + result = metadata_filter(input_list, item="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_item(self): + coord = Mock(__class__=AuxCoord) + mock = Mock() + input_list = [coord, mock] + result = metadata_filter(input_list, item=coord) + self.assertIn(coord, result) + self.assertNotIn(mock, result) + + def test_item_metadata(self): + coord = Mock(metadata=CoordMetadata) + dim_coord = Mock(metadata=DimCoordMetadata) + input_list = [coord, dim_coord] + result = metadata_filter(input_list, item=coord) + self.assertIn(coord, result) + self.assertNotIn(dim_coord, result) + + def test_standard_name(self): + name_one = Mock(standard_name="one") + name_two = Mock(standard_name="two") + input_list = [name_one, name_two] + result = metadata_filter(input_list, standard_name="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_long_name(self): + name_one = Mock(long_name="one") + name_two = Mock(long_name="two") + input_list = [name_one, name_two] + result = metadata_filter(input_list, long_name="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_var_name(self): + name_one = Mock(var_name="one") + name_two = Mock(var_name="two") + input_list = [name_one, name_two] + result = metadata_filter(input_list, var_name="one") + self.assertIn(name_one, result) + self.assertNotIn(name_two, result) + + def test_attributes(self): + # Confirm that this can handle attrib dicts including np arrays. 
+ attrib_one_two = Mock( + attributes={"one": np.arange(1), "two": np.arange(2)} + ) + attrib_three_four = Mock( + attributes={"three": np.arange(3), "four": np.arange(4)} + ) + input_list = [attrib_one_two, attrib_three_four] + result = metadata_filter( + input_list, attributes=attrib_one_two.attributes + ) + self.assertIn(attrib_one_two, result) + self.assertNotIn(attrib_three_four, result) + + def test_invalid_attributes(self): + attrib_one = Mock(attributes={"one": 1}) + input_list = [attrib_one] + self.assertRaisesRegex( + ValueError, + ".*expecting a dictionary.*", + metadata_filter, + input_list, + attributes="one", + ) + + def test_axis__by_guess(self): + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + axis_lon = Mock(standard_name="longitude") + del axis_lon.axis + axis_lat = Mock(standard_name="latitude") + del axis_lat.axis + input_list = [axis_lon, axis_lat] + result = metadata_filter(input_list, axis="x") + self.assertIn(axis_lon, result) + self.assertNotIn(axis_lat, result) + + def test_axis__by_member(self): + axis_x = Mock(axis="x") + axis_y = Mock(axis="y") + input_list = [axis_x, axis_y] + result = metadata_filter(input_list, axis="x") + self.assertEqual(1, len(result)) + self.assertIn(axis_x, result) + + def test_multiple_args(self): + coord_one = Mock(__class__=AuxCoord, long_name="one") + coord_two = Mock(__class__=AuxCoord, long_name="two") + input_list = [coord_one, coord_two] + result = metadata_filter(input_list, item=coord_one, long_name="one") + self.assertIn(coord_one, result) + self.assertNotIn(coord_two, result) diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py index 4edd889c73..e6a3b1de59 100644 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py @@ -25,11 +25,13 @@ CubeMetadata, metadata_manager_factory, 
) +from iris.experimental.ugrid import ConnectivityMetadata BASES = [ AncillaryVariableMetadata, BaseMetadata, CellMeasureMetadata, + ConnectivityMetadata, CoordMetadata, CubeMetadata, ] diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py index e03ae250f1..f699234de8 100644 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py @@ -25,6 +25,7 @@ CubeMetadata, ) from iris.common.mixin import CFVariableMixin, LimitedAttributeDict +from iris.experimental.ugrid import ConnectivityMetadata class Test__getter(tests.IrisTest): @@ -284,6 +285,19 @@ def test_class_cellmeasuremetadata(self): self.item._metadata_manager.attributes, metadata.attributes ) + def test_class_connectivitymetadata(self): + self.args.update(dict(cf_role=None, start_index=None, src_dim=None)) + metadata = ConnectivityMetadata(**self.args) + self.item.metadata = metadata + expected = metadata._asdict() + del expected["cf_role"] + del expected["start_index"] + del expected["src_dim"] + self.assertEqual(self.item._metadata_manager.values, expected) + self.assertIsNot( + self.item._metadata_manager.attributes, metadata.attributes + ) + def test_class_coordmetadata(self): self.args.update(dict(coord_system=None, climatological=False)) metadata = CoordMetadata(**self.args) diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index 4f78d697b6..c7da5a54cb 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -21,6 +21,7 @@ from iris.analysis import MEAN, Aggregator, WeightedAggregator import iris.aux_factory from iris.aux_factory import HybridHeightFactory +from iris.common.metadata import BaseMetadata import iris.coords from iris.coords import ( AncillaryVariable, @@ -38,6 +39,11 @@ UnitConversionError, ) import iris.tests.stock as stock +from iris.tests.stock.mesh import ( + 
sample_mesh, + sample_mesh_cube, + sample_meshcoord, +) class Test___init___data(tests.IrisTest): @@ -2005,6 +2011,383 @@ def test__lazy(self): self._check_copy(cube, cube.copy()) +def _add_test_meshcube(self, nomesh=False, n_z=2, **meshcoord_kwargs): + """ + Common setup action : Create a standard mesh test cube with a variety of coords, and save the cube and various of + its components as properties of the 'self' TestCase. + + """ + cube, parts = sample_mesh_cube( + nomesh=nomesh, n_z=n_z, with_parts=True, **meshcoord_kwargs + ) + mesh, zco, mesh_dimco, auxco_x, meshx, meshy = parts + self.mesh = mesh + self.dimco_z = zco + self.dimco_mesh = mesh_dimco + if not nomesh: + self.meshco_x = meshx + self.meshco_y = meshy + self.auxco_x = auxco_x + self.allcoords = [meshx, meshy, zco, mesh_dimco, auxco_x] + self.cube = cube + + +class Test_coords__mesh_coords(tests.IrisTest): + """ + Checking *only* the new "mesh_coords" keyword of the coord/coords methods. + + This is *not* attached to the existing tests for this area, as they are + very old and patchy legacy tests. See: iris.tests.test_cdm.TestQueryCoord. + + """ + + def setUp(self): + # Create a standard test cube with a variety of types of coord. + _add_test_meshcube(self) + + def _assert_lists_equal(self, items_a, items_b): + """ + Check that two lists of coords, cubes etc contain the same things. + Lists must contain the same items, including any repeats, but can be in + a different order. + + """ + # Compare (and thus sort) by their *common* metadata. + def sortkey(item): + return BaseMetadata.from_metadata(item.metadata) + + items_a = sorted(items_a, key=sortkey) + items_b = sorted(items_b, key=sortkey) + self.assertEqual(items_a, items_b) + + def test_coords__all__meshcoords_default(self): + # coords() includes mesh-coords along with the others. 
+ result = self.cube.coords() + expected = self.allcoords + self._assert_lists_equal(expected, result) + + def test_coords__all__meshcoords_only(self): + # Coords(mesh_coords=True) returns only mesh-coords. + result = self.cube.coords(mesh_coords=True) + expected = [self.meshco_x, self.meshco_y] + self._assert_lists_equal(expected, result) + + def test_coords__all__meshcoords_omitted(self): + # Coords(mesh_coords=False) omits the mesh-coords. + result = self.cube.coords(mesh_coords=False) + expected = set(self.allcoords) - set([self.meshco_x, self.meshco_y]) + self._assert_lists_equal(expected, result) + + def test_coords__axis__meshcoords(self): + # Coord (singular) with axis + mesh_coords=True + result = self.cube.coord(axis="x", mesh_coords=True) + self.assertIs(result, self.meshco_x) + + def test_coords__dimcoords__meshcoords(self): + # dim_coords and mesh_coords should be mutually exclusive. + result = self.cube.coords(dim_coords=True, mesh_coords=True) + self.assertEqual(result, []) + + def test_coords__nodimcoords__meshcoords(self): + # When mesh_coords=True, dim_coords=False should have no effect. + result = self.cube.coords(dim_coords=False, mesh_coords=True) + expected = [self.meshco_x, self.meshco_y] + self._assert_lists_equal(expected, result) + + +class Test_mesh(tests.IrisTest): + def setUp(self): + # Create a standard test cube with a variety of types of coord. + _add_test_meshcube(self) + + def test_mesh(self): + result = self.cube.mesh + self.assertIs(result, self.mesh) + + def test_no_mesh(self): + # Replace standard setUp cube with a no-mesh version. + _add_test_meshcube(self, nomesh=True) + result = self.cube.mesh + self.assertIsNone(result) + + +class Test_location(tests.IrisTest): + def setUp(self): + # Create a standard test cube with a variety of types of coord. + _add_test_meshcube(self) + + def test_no_mesh(self): + # Replace standard setUp cube with a no-mesh version. 
+ _add_test_meshcube(self, nomesh=True) + result = self.cube.location + self.assertIsNone(result) + + def test_mesh(self): + cube = self.cube + result = cube.location + self.assertEqual(result, self.meshco_x.location) + + def test_alternate_location(self): + # Replace standard setUp cube with an edge-based version. + _add_test_meshcube(self, location="edge") + cube = self.cube + result = cube.location + self.assertEqual(result, "edge") + + +class Test_mesh_dim(tests.IrisTest): + def setUp(self): + # Create a standard test cube with a variety of types of coord. + _add_test_meshcube(self) + + def test_no_mesh(self): + # Replace standard setUp cube with a no-mesh version. + _add_test_meshcube(self, nomesh=True) + result = self.cube.mesh_dim() + self.assertIsNone(result) + + def test_mesh(self): + cube = self.cube + result = cube.mesh_dim() + self.assertEqual(result, 1) + + def test_alternate(self): + # Replace standard setUp cube with an edge-based version. + _add_test_meshcube(self, location="edge") + cube = self.cube + # Transpose the cube : the mesh dim is then 0 + cube.transpose() + result = cube.mesh_dim() + self.assertEqual(result, 0) + + +class Test__init__mesh(tests.IrisTest): + """ + Test that creation with mesh-coords functions, and prevents a cube having + incompatible mesh-coords. + + """ + + def setUp(self): + # Create a standard test mesh and other useful components. + mesh = sample_mesh() + meshco = sample_meshcoord(mesh=mesh) + self.mesh = mesh + self.meshco = meshco + self.nz = 2 + self.n_faces = meshco.shape[0] + + def test_mesh(self): + # Create a new cube from some of the parts. 
+ nz, n_faces = self.nz, self.n_faces + dimco_z = DimCoord(np.arange(nz), long_name="z") + dimco_mesh = DimCoord(np.arange(n_faces), long_name="x") + meshco = self.meshco + cube = Cube( + np.zeros((nz, n_faces)), + dim_coords_and_dims=[(dimco_z, 0), (dimco_mesh, 1)], + aux_coords_and_dims=[(meshco, 1)], + ) + self.assertEqual(cube.mesh, meshco.mesh) + + def test_fail_dim_meshcoord(self): + # As "test_mesh", but attempt to use the meshcoord as a dim-coord. + # This should not be allowed. + nz, n_faces = self.nz, self.n_faces + dimco_z = DimCoord(np.arange(nz), long_name="z") + meshco = self.meshco + with self.assertRaisesRegex(ValueError, "may not be an AuxCoord"): + Cube( + np.zeros((nz, n_faces)), + dim_coords_and_dims=[(dimco_z, 0), (meshco, 1)], + ) + + def test_multi_meshcoords(self): + meshco_x = sample_meshcoord(axis="x", mesh=self.mesh) + meshco_y = sample_meshcoord(axis="y", mesh=self.mesh) + n_faces = meshco_x.shape[0] + cube = Cube( + np.zeros(n_faces), + aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], + ) + self.assertEqual(cube.mesh, meshco_x.mesh) + + def test_multi_meshcoords_same_axis(self): + # *Not* an error, as long as the coords are distinguishable. + meshco_1 = sample_meshcoord(axis="x", mesh=self.mesh) + meshco_2 = sample_meshcoord(axis="x", mesh=self.mesh) + # Can't make these different at creation, owing to the limited + # constructor args, but we can adjust common metadata afterwards. + meshco_2.rename("junk_name") + + n_faces = meshco_1.shape[0] + cube = Cube( + np.zeros(n_faces), + aux_coords_and_dims=[(meshco_1, 0), (meshco_2, 0)], + ) + self.assertEqual(cube.mesh, meshco_1.mesh) + + def test_fail_meshcoords_different_locations(self): + # Same as successful 'multi_mesh', but different locations. + # N.B. 
must have a mesh with n-faces == n-edges to test this + mesh = sample_mesh(n_faces=7, n_edges=7) + meshco_1 = sample_meshcoord(axis="x", mesh=mesh, location="face") + meshco_2 = sample_meshcoord(axis="y", mesh=mesh, location="edge") + # They should still have the same *shape* (or would fail anyway) + self.assertEqual(meshco_1.shape, meshco_2.shape) + n_faces = meshco_1.shape[0] + msg = "does not match existing cube location" + with self.assertRaisesRegex(ValueError, msg): + Cube( + np.zeros(n_faces), + aux_coords_and_dims=[(meshco_1, 0), (meshco_2, 0)], + ) + + def test_fail_meshcoords_different_meshes(self): + # Same as successful 'multi_mesh', but not sharing the same mesh. + # This one *is* an error. + # But that could relax in future, if we allow mesh equality testing + # (i.e. "mesh_a == mesh_b" when not "mesh_a is mesh_b") + meshco_x = sample_meshcoord(axis="x") + meshco_y = sample_meshcoord(axis="y") # Own (different) mesh + n_faces = meshco_x.shape[0] + with self.assertRaisesRegex(ValueError, "Mesh.* does not match"): + Cube( + np.zeros(n_faces), + aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], + ) + + def test_fail_meshcoords_different_dims(self): + # Same as 'test_mesh', but meshcoords on different dimensions. + # Replace standard setup with one where n_z == n_faces. + n_z, n_faces = 4, 4 + mesh = sample_mesh(n_faces=n_faces) + meshco_x = sample_meshcoord(mesh=mesh, axis="x") + meshco_y = sample_meshcoord(mesh=mesh, axis="y") + msg = "does not match existing cube mesh dimension" + with self.assertRaisesRegex(ValueError, msg): + Cube( + np.zeros((n_z, n_faces)), + aux_coords_and_dims=[(meshco_x, 1), (meshco_y, 0)], + ) + + +class Test__add_aux_coord__mesh(tests.IrisTest): + """ + Test that "Cube.add_aux_coord" functions with a mesh-coord, and prevents a + cube having incompatible mesh-coords. 
+ + """ + + def setUp(self): + _add_test_meshcube(self) + # Remove the existing "meshco_y", so we can add similar ones without + # needing to distinguish from the existing. + self.cube.remove_coord(self.meshco_y) + + def test_add_compatible(self): + cube = self.cube + meshco_y = self.meshco_y + # Add the y-meshco back into the cube. + cube.add_aux_coord(meshco_y, 1) + self.assertIn(meshco_y, cube.coords(mesh_coords=True)) + + def test_add_multiple(self): + # Show that we can add extra mesh coords. + cube = self.cube + meshco_y = self.meshco_y + # Add the y-meshco back into the cube. + cube.add_aux_coord(meshco_y, 1) + # Make a duplicate y-meshco, renamed so it can add into the cube. + new_meshco_y = meshco_y.copy() + new_meshco_y.rename("alternative") + cube.add_aux_coord(new_meshco_y, 1) + self.assertEqual(len(cube.coords(mesh_coords=True)), 3) + + def test_fail_different_mesh(self): + # Make a duplicate y-meshco, and rename so it can add into the cube. + cube = self.cube + # Create 'meshco_y' duplicate, but a new mesh + meshco_y = sample_meshcoord(axis="y") + msg = "does not match existing cube mesh" + with self.assertRaisesRegex(ValueError, msg): + cube.add_aux_coord(meshco_y, 1) + + def test_fail_different_location(self): + # Make a new mesh with equal n_faces and n_edges + mesh = sample_mesh(n_faces=4, n_edges=4) + # Re-make the test objects based on that. + _add_test_meshcube(self, mesh=mesh) + cube = self.cube + cube.remove_coord(self.meshco_y) # Remove y-coord, as in setUp() + # Create a new meshco_y, same mesh but based on edges. + meshco_y = sample_meshcoord(axis="y", mesh=self.mesh, location="edge") + msg = "does not match existing cube location" + with self.assertRaisesRegex(ValueError, msg): + cube.add_aux_coord(meshco_y, 1) + + def test_fail_different_dimension(self): + # Re-make the test objects with the non-mesh dim equal in length. 
+ n_faces = self.cube.shape[1] + _add_test_meshcube(self, n_z=n_faces) + cube = self.cube + meshco_y = self.meshco_y + cube.remove_coord(meshco_y) # Remove y-coord, as in setUp() + + # Attempt to re-attach the 'y' meshcoord, to a different cube dimension. + msg = "does not match existing cube mesh dimension" + with self.assertRaisesRegex(ValueError, msg): + cube.add_aux_coord(meshco_y, 0) + + +class Test__add_dim_coord__mesh(tests.IrisTest): + """ + Test that "Cube.add_dim_coord" cannot work with a mesh-coord. + + """ + + def test(self): + # Create a mesh with only 2 faces, so coord *can't* be non-monotonic. + mesh = sample_mesh(n_faces=2) + meshco = sample_meshcoord(mesh=mesh) + cube = Cube([0, 1]) + with self.assertRaisesRegex(ValueError, "may not be an AuxCoord"): + cube.add_dim_coord(meshco, 0) + + +class Test__eq__mesh(tests.IrisTest): + """ + Check that cubes with meshes support == as expected. + + Note: there is no special code for this in iris.cube.Cube : it is + provided by the coord comparisons. + + """ + + def setUp(self): + # Create a 'standard' test cube. + _add_test_meshcube(self) + + def test_copied_cube_match(self): + cube = self.cube + cube2 = cube.copy() + self.assertEqual(cube, cube2) + + def test_same_mesh_match(self): + cube1 = self.cube + # re-create an identical cube, using the same mesh. + _add_test_meshcube(self, mesh=self.mesh) + cube2 = self.cube + self.assertEqual(cube1, cube2) + + def test_new_mesh_different(self): + cube1 = self.cube + # re-create an identical cube, using the same mesh. 
+ _add_test_meshcube(self) + cube2 = self.cube + self.assertNotEqual(cube1, cube2) + + class Test_dtype(tests.IrisTest): def setUp(self): self.dtypes = ( diff --git a/lib/iris/tests/unit/experimental/ugrid/__init__.py b/lib/iris/tests/unit/experimental/ugrid/__init__.py new file mode 100644 index 0000000000..7f55678f06 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :mod:`iris.experimental.ugrid` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridAuxiliaryCoordinateVariable.py new file mode 100644 index 0000000000..fb660d3a5f --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridAuxiliaryCoordinateVariable.py @@ -0,0 +1,235 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.CFUGridAuxiliaryCoordinateVariable` class. + +todo: fold these tests into cf tests when experimental.ugrid is folded into + standard behaviour. + +""" +import numpy as np + +from iris.experimental.ugrid import CFUGridAuxiliaryCoordinateVariable, logger + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests +from iris.tests.unit.experimental.ugrid.test_CFUGridReader import ( + netcdf_ugrid_variable, +) + + +def named_variable(name): + # Don't need to worry about dimensions or dtype for these tests. 
+ return netcdf_ugrid_variable(name, "", np.int) + + +class TestIdentify(tests.IrisTest): + def setUp(self): + self.cf_identities = [ + "node_coordinates", + "edge_coordinates", + "face_coordinates", + "volume_coordinates", + ] + + def test_cf_identities(self): + subject_name = "ref_subject" + ref_subject = named_variable(subject_name) + vars_common = { + subject_name: ref_subject, + "ref_not_subject": named_variable("ref_not_subject"), + } + # ONLY expecting ref_subject, excluding ref_not_subject. + expected = { + subject_name: CFUGridAuxiliaryCoordinateVariable( + subject_name, ref_subject + ) + } + + for identity in self.cf_identities: + ref_source = named_variable("ref_source") + setattr(ref_source, identity, subject_name) + vars_all = dict({"ref_source": ref_source}, **vars_common) + result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_duplicate_refs(self): + subject_name = "ref_subject" + ref_subject = named_variable(subject_name) + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for var in ref_source_vars.values(): + setattr(var, self.cf_identities[0], subject_name) + vars_all = dict( + { + subject_name: ref_subject, + "ref_not_subject": named_variable("ref_not_subject"), + }, + **ref_source_vars, + ) + + # ONLY expecting ref_subject, excluding ref_not_subject. 
+ expected = { + subject_name: CFUGridAuxiliaryCoordinateVariable( + subject_name, ref_subject + ) + } + result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_two_coords(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, self.cf_identities[ix], subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # Not expecting ref_not_subject. + expected = { + name: CFUGridAuxiliaryCoordinateVariable(name, var) + for name, var in ref_subject_vars.items() + } + result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_two_part_ref(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source = named_variable("ref_source") + setattr(ref_source, self.cf_identities[0], " ".join(subject_names)) + vars_all = { + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + **ref_subject_vars, + } + + expected = { + name: CFUGridAuxiliaryCoordinateVariable(name, var) + for name, var in ref_subject_vars.items() + } + result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_string_type_ignored(self): + subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, self.cf_identities[0], subject_name) + vars_all = { + subject_name: netcdf_ugrid_variable(subject_name, "", np.bytes_), + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + result = 
CFUGridAuxiliaryCoordinateVariable.identify(vars_all) + self.assertDictEqual({}, result) + + def test_ignore(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, self.cf_identities[0], subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # ONLY expect the subject variable that hasn't been ignored. + expected_name = subject_names[0] + expected = { + expected_name: CFUGridAuxiliaryCoordinateVariable( + expected_name, ref_subject_vars[expected_name] + ) + } + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, ignore=subject_names[1] + ) + self.assertDictEqual(expected, result) + + def test_target(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + source_names = ("ref_source_1", "ref_source_2") + ref_source_vars = {name: named_variable(name) for name in source_names} + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, self.cf_identities[0], subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # ONLY expect the variable referenced by the named ref_source_var. 
+ expected_name = subject_names[0] + expected = { + expected_name: CFUGridAuxiliaryCoordinateVariable( + expected_name, ref_subject_vars[expected_name] + ) + } + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, target=source_names[0] + ) + self.assertDictEqual(expected, result) + + def test_warn(self): + subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, self.cf_identities[0], subject_name) + vars_all = { + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + # The warn kwarg and expected corresponding log level. + warn_and_level = {True: "WARNING", False: "DEBUG"} + + # Missing warning. + log_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) + + # String variable warning. + log_regex = r".*is a CF-netCDF label variable.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + result = CFUGridAuxiliaryCoordinateVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridConnectivityVariable.py new file mode 100644 index 0000000000..1a834ad554 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridConnectivityVariable.py @@ -0,0 +1,227 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+""" +Unit tests for the :class:`iris.experimental.ugrid.CFUGridConnectivityVariable` class. + +todo: fold these tests into cf tests when experimental.ugrid is folded into + standard behaviour. + +""" +import numpy as np + +from iris.experimental.ugrid import ( + CFUGridConnectivityVariable, + Connectivity, + logger, +) + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests +from iris.tests.unit.experimental.ugrid.test_CFUGridReader import ( + netcdf_ugrid_variable, +) + + +def named_variable(name): + # Don't need to worry about dimensions or dtype for these tests. + return netcdf_ugrid_variable(name, "", np.int) + + +class TestIdentify(tests.IrisTest): + def test_cf_identities(self): + subject_name = "ref_subject" + ref_subject = named_variable(subject_name) + vars_common = { + subject_name: ref_subject, + "ref_not_subject": named_variable("ref_not_subject"), + } + # ONLY expecting ref_subject, excluding ref_not_subject. + expected = { + subject_name: CFUGridConnectivityVariable( + subject_name, ref_subject + ) + } + + for identity in Connectivity.UGRID_CF_ROLES: + ref_source = named_variable("ref_source") + setattr(ref_source, identity, subject_name) + vars_all = dict({"ref_source": ref_source}, **vars_common) + result = CFUGridConnectivityVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_duplicate_refs(self): + subject_name = "ref_subject" + ref_subject = named_variable(subject_name) + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for var in ref_source_vars.values(): + setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_name) + vars_all = dict( + { + subject_name: ref_subject, + "ref_not_subject": named_variable("ref_not_subject"), + }, + **ref_source_vars, + ) + + # ONLY expecting ref_subject, excluding ref_not_subject. 
+ expected = { + subject_name: CFUGridConnectivityVariable( + subject_name, ref_subject + ) + } + result = CFUGridConnectivityVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_two_cf_roles(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, Connectivity.UGRID_CF_ROLES[ix], subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # Not expecting ref_not_subject. + expected = { + name: CFUGridConnectivityVariable(name, var) + for name, var in ref_subject_vars.items() + } + result = CFUGridConnectivityVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_two_part_ref_ignored(self): + # Not expected to handle more than one variable for a connectivity + # cf role - invalid UGRID. 
+ subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr( + ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name + " foo" + ) + vars_all = { + subject_name: named_variable(subject_name), + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + result = CFUGridConnectivityVariable.identify(vars_all) + self.assertDictEqual({}, result) + + def test_string_type_ignored(self): + subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name) + vars_all = { + subject_name: netcdf_ugrid_variable(subject_name, "", np.bytes_), + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + result = CFUGridConnectivityVariable.identify(vars_all) + self.assertDictEqual({}, result) + + def test_ignore(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # ONLY expect the subject variable that hasn't been ignored. 
+ expected_name = subject_names[0] + expected = { + expected_name: CFUGridConnectivityVariable( + expected_name, ref_subject_vars[expected_name] + ) + } + result = CFUGridConnectivityVariable.identify( + vars_all, ignore=subject_names[1] + ) + self.assertDictEqual(expected, result) + + def test_target(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + source_names = ("ref_source_1", "ref_source_2") + ref_source_vars = {name: named_variable(name) for name in source_names} + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # ONLY expect the variable referenced by the named ref_source_var. + expected_name = subject_names[0] + expected = { + expected_name: CFUGridConnectivityVariable( + expected_name, ref_subject_vars[expected_name] + ) + } + result = CFUGridConnectivityVariable.identify( + vars_all, target=source_names[0] + ) + self.assertDictEqual(expected, result) + + def test_warn(self): + subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name) + vars_all = { + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + # The warn kwarg and expected corresponding log level. + warn_and_level = {True: "WARNING", False: "DEBUG"} + + # Missing warning. + log_regex = rf"Missing CF-UGRID connectivity variable {subject_name}.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + result = CFUGridConnectivityVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) + + # String variable warning. 
+ log_regex = r".*is a CF-netCDF label variable.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + result = CFUGridConnectivityVariable.identify( + vars_all, warn=warn + ) + self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_CFUGridGroup.py b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridGroup.py new file mode 100644 index 0000000000..b0db286cb1 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridGroup.py @@ -0,0 +1,99 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.CFUGridGroup` class. + +todo: fold these tests into cf tests when experimental.ugrid is folded into + standard behaviour. + +""" +from unittest.mock import MagicMock + +from iris.experimental.ugrid import ( + CFUGridAuxiliaryCoordinateVariable, + CFUGridConnectivityVariable, + CFUGridGroup, + CFUGridMeshVariable, +) +from iris.fileformats.cf import CFCoordinateVariable, CFDataVariable + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + + +class Tests(tests.IrisTest): + def setUp(self): + self.cf_group = CFUGridGroup() + + def test_inherited(self): + coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") + self.cf_group[coord_var.cf_name] = coord_var + self.assertEqual( + coord_var, self.cf_group.coordinates[coord_var.cf_name] + ) + + def test_connectivities(self): + conn_var = MagicMock( + spec=CFUGridConnectivityVariable, cf_name="conn_var" + ) + self.cf_group[conn_var.cf_name] = conn_var + self.assertEqual( + conn_var, self.cf_group.connectivities[conn_var.cf_name] + ) + + def test_ugrid_coords(self): + coord_var = MagicMock( + spec=CFUGridAuxiliaryCoordinateVariable, cf_name="coord_var" + ) + self.cf_group[coord_var.cf_name] = coord_var + self.assertEqual( + coord_var, self.cf_group.ugrid_coords[coord_var.cf_name] + ) + + def test_meshes(self): + mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") + self.cf_group[mesh_var.cf_name] = mesh_var + self.assertEqual(mesh_var, self.cf_group.meshes[mesh_var.cf_name]) + + def test_non_data_names(self): + data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") + coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") + conn_var = MagicMock( + spec=CFUGridConnectivityVariable, cf_name="conn_var" + ) + ugrid_coord_var = MagicMock( + spec=CFUGridAuxiliaryCoordinateVariable, cf_name="ugrid_coord_var" + ) + mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") + mesh_var2 = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var2") + duplicate_name_var = MagicMock( + spec=CFUGridMeshVariable, cf_name="coord_var" + ) + + for var in ( + data_var, + coord_var, + conn_var, + ugrid_coord_var, + mesh_var, + mesh_var2, + duplicate_name_var, + ): + self.cf_group[var.cf_name] = var + + expected_names = [ + var.cf_name + for var in ( + coord_var, + conn_var, + ugrid_coord_var, + mesh_var, + mesh_var2, + ) + ] + expected = set(expected_names) + self.assertEqual(expected, 
self.cf_group.non_data_variable_names)
diff --git a/lib/iris/tests/unit/experimental/ugrid/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridMeshVariable.py
new file mode 100644
index 0000000000..e08dbc769e
--- /dev/null
+++ b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridMeshVariable.py
@@ -0,0 +1,213 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :class:`iris.experimental.ugrid.CFUGridMeshVariable` class.
+
+todo: fold these tests into cf tests when experimental.ugrid is folded into
+    standard behaviour.
+
+"""
+import numpy as np
+
+from iris.experimental.ugrid import CFUGridMeshVariable, logger
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests
+from iris.tests.unit.experimental.ugrid.test_CFUGridReader import (
+    netcdf_ugrid_variable,
+)
+
+
+def named_variable(name):
+    # Don't need to worry about dimensions or dtype for these tests.
+    return netcdf_ugrid_variable(name, "", int)
+
+
+class TestIdentify(tests.IrisTest):
+    def setUp(self):
+        self.cf_identity = "mesh"
+
+    def test_cf_identity(self):
+        subject_name = "ref_subject"
+        ref_subject = named_variable(subject_name)
+        ref_source = named_variable("ref_source")
+        setattr(ref_source, self.cf_identity, subject_name)
+        vars_all = {
+            subject_name: ref_subject,
+            "ref_not_subject": named_variable("ref_not_subject"),
+            "ref_source": ref_source,
+        }
+
+        # ONLY expecting ref_subject, excluding ref_not_subject.
+ expected = { + subject_name: CFUGridMeshVariable(subject_name, ref_subject) + } + result = CFUGridMeshVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_duplicate_refs(self): + subject_name = "ref_subject" + ref_subject = named_variable(subject_name) + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for var in ref_source_vars.values(): + setattr(var, self.cf_identity, subject_name) + vars_all = dict( + { + subject_name: ref_subject, + "ref_not_subject": named_variable("ref_not_subject"), + }, + **ref_source_vars, + ) + + # ONLY expecting ref_subject, excluding ref_not_subject. + expected = { + subject_name: CFUGridMeshVariable(subject_name, ref_subject) + } + result = CFUGridMeshVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_two_refs(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, self.cf_identity, subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # Not expecting ref_not_subject. + expected = { + name: CFUGridMeshVariable(name, var) + for name, var in ref_subject_vars.items() + } + result = CFUGridMeshVariable.identify(vars_all) + self.assertDictEqual(expected, result) + + def test_two_part_ref_ignored(self): + # Not expected to handle more than one variable for a mesh + # cf role - invalid UGRID. 
+ subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, self.cf_identity, subject_name + " foo") + vars_all = { + subject_name: named_variable(subject_name), + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + result = CFUGridMeshVariable.identify(vars_all) + self.assertDictEqual({}, result) + + def test_string_type_ignored(self): + subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, self.cf_identity, subject_name) + vars_all = { + subject_name: netcdf_ugrid_variable(subject_name, "", np.bytes_), + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + result = CFUGridMeshVariable.identify(vars_all) + self.assertDictEqual({}, result) + + def test_ignore(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + ref_source_vars = { + name: named_variable(name) + for name in ("ref_source_1", "ref_source_2") + } + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, self.cf_identity, subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # ONLY expect the subject variable that hasn't been ignored. 
+ expected_name = subject_names[0] + expected = { + expected_name: CFUGridMeshVariable( + expected_name, ref_subject_vars[expected_name] + ) + } + result = CFUGridMeshVariable.identify( + vars_all, ignore=subject_names[1] + ) + self.assertDictEqual(expected, result) + + def test_target(self): + subject_names = ("ref_subject_1", "ref_subject_2") + ref_subject_vars = { + name: named_variable(name) for name in subject_names + } + + source_names = ("ref_source_1", "ref_source_2") + ref_source_vars = {name: named_variable(name) for name in source_names} + for ix, var in enumerate(ref_source_vars.values()): + setattr(var, self.cf_identity, subject_names[ix]) + vars_all = dict( + {"ref_not_subject": named_variable("ref_not_subject")}, + **ref_subject_vars, + **ref_source_vars, + ) + + # ONLY expect the variable referenced by the named ref_source_var. + expected_name = subject_names[0] + expected = { + expected_name: CFUGridMeshVariable( + expected_name, ref_subject_vars[expected_name] + ) + } + result = CFUGridMeshVariable.identify(vars_all, target=source_names[0]) + self.assertDictEqual(expected, result) + + def test_warn(self): + subject_name = "ref_subject" + ref_source = named_variable("ref_source") + setattr(ref_source, self.cf_identity, subject_name) + vars_all = { + "ref_not_subject": named_variable("ref_not_subject"), + "ref_source": ref_source, + } + + # The warn kwarg and expected corresponding log level. + warn_and_level = {True: "WARNING", False: "DEBUG"} + + # Missing warning. + log_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + result = CFUGridMeshVariable.identify(vars_all, warn=warn) + self.assertDictEqual({}, result) + + # String variable warning. 
+ log_regex = r".*is a CF-netCDF label variable.*" + for warn, level in warn_and_level.items(): + with self.assertLogs(logger, level=level, msg_regex=log_regex): + vars_all[subject_name] = netcdf_ugrid_variable( + subject_name, "", np.bytes_ + ) + result = CFUGridMeshVariable.identify(vars_all, warn=warn) + self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridReader.py new file mode 100644 index 0000000000..6449b83135 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_CFUGridReader.py @@ -0,0 +1,134 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.CFUGridGroup` class. + +todo: fold these tests into cf tests when experimental.ugrid is folded into + standard behaviour. + +""" +from unittest import mock + +import numpy as np + +from iris.experimental.ugrid import ( + CFUGridAuxiliaryCoordinateVariable, + CFUGridConnectivityVariable, + CFUGridGroup, + CFUGridMeshVariable, + CFUGridReader, +) +from iris.fileformats.cf import CFCoordinateVariable, CFDataVariable + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests +from iris.tests.unit.fileformats.cf.test_CFReader import netcdf_variable + + +def netcdf_ugrid_variable( + name, + dimensions, + dtype, + coordinates=None, +): + ncvar = netcdf_variable( + name=name, dimensions=dimensions, dtype=dtype, coordinates=coordinates + ) + + # Fill in all the extra UGRID attributes to prevent problems with getattr + # and Mock. Any attribute can be replaced in downstream setUp if present. 
+    ugrid_attrs = (
+        CFUGridAuxiliaryCoordinateVariable.cf_identities
+        + CFUGridConnectivityVariable.cf_identities
+        + [CFUGridMeshVariable.cf_identity]
+    )
+    for attr in ugrid_attrs:
+        setattr(ncvar, attr, None)
+
+    return ncvar
+
+
+class Test_build_cf_groups(tests.IrisTest):
+    @classmethod
+    def setUpClass(cls):
+        # Replicating syntax from test_CFReader.Test_build_cf_groups__formula_terms.
+        cls.mesh = netcdf_ugrid_variable("mesh", "", int)
+        cls.node_x = netcdf_ugrid_variable("node_x", "node", float)
+        cls.node_y = netcdf_ugrid_variable("node_y", "node", float)
+        cls.face_x = netcdf_ugrid_variable("face_x", "face", float)
+        cls.face_y = netcdf_ugrid_variable("face_y", "face", float)
+        cls.face_nodes = netcdf_ugrid_variable(
+            "face_nodes", "face vertex", int
+        )
+        cls.levels = netcdf_ugrid_variable("levels", "levels", int)
+        cls.data = netcdf_ugrid_variable(
+            "data", "levels face", float, coordinates="face_x face_y"
+        )
+
+        # Add necessary attributes for mesh recognition.
+        cls.mesh.cf_role = "mesh_topology"
+        cls.mesh.node_coordinates = "node_x node_y"
+        cls.mesh.face_coordinates = "face_x face_y"
+        cls.mesh.face_node_connectivity = "face_nodes"
+        cls.face_nodes.cf_role = "face_node_connectivity"
+        cls.data.mesh = "mesh"
+
+        cls.variables = dict(
+            mesh=cls.mesh,
+            node_x=cls.node_x,
+            node_y=cls.node_y,
+            face_x=cls.face_x,
+            face_y=cls.face_y,
+            face_nodes=cls.face_nodes,
+            levels=cls.levels,
+            data=cls.data,
+        )
+        ncattrs = mock.Mock(return_value=[])
+        cls.dataset = mock.Mock(
+            file_format="NetCDF4", variables=cls.variables, ncattrs=ncattrs
+        )
+
+    def setUp(self):
+        # Restrict the CFUGridReader functionality to only performing
+        # translations and building first level cf-groups for variables.
+ self.patch("iris.experimental.ugrid.CFUGridReader._reset") + self.patch("netCDF4.Dataset", return_value=self.dataset) + cf_reader = CFUGridReader("dummy") + self.cf_group = cf_reader.cf_group + + def test_inherited(self): + for expected_var, collection in ( + [CFCoordinateVariable("levels", self.levels), "coordinates"], + [CFDataVariable("data", self.data), "data_variables"], + ): + expected = {expected_var.cf_name: expected_var} + self.assertDictEqual(expected, getattr(self.cf_group, collection)) + + def test_connectivities(self): + expected_var = CFUGridConnectivityVariable( + "face_nodes", self.face_nodes + ) + expected = {expected_var.cf_name: expected_var} + self.assertDictEqual(expected, self.cf_group.connectivities) + + def test_mesh(self): + expected_var = CFUGridMeshVariable("mesh", self.mesh) + expected = {expected_var.cf_name: expected_var} + self.assertDictEqual(expected, self.cf_group.meshes) + + def test_ugrid_coords(self): + names = [ + f"{loc}_{ax}" for loc in ("node", "face") for ax in ("x", "y") + ] + expected = { + name: CFUGridAuxiliaryCoordinateVariable(name, getattr(self, name)) + for name in names + } + self.assertDictEqual(expected, self.cf_group.ugrid_coords) + + def test_is_cf_ugrid_group(self): + self.assertIsInstance(self.cf_group, CFUGridGroup) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/test_Connectivity.py new file mode 100644 index 0000000000..319f875f53 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_Connectivity.py @@ -0,0 +1,347 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+"""Unit tests for the :class:`iris.experimental.ugrid.Connectivity` class.""" + +from xml.dom import minidom + +import numpy as np +from numpy import ma + +from iris._lazy_data import as_lazy_data, is_lazy_data +from iris.experimental.ugrid import Connectivity + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + + +class TestStandard(tests.IrisTest): + def setUp(self): + # Crete an instance, with non-default arguments to allow testing of + # correct property setting. + self.kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((3, -1)), + "cf_role": "face_node_connectivity", + "long_name": "my_face_nodes", + "var_name": "face_nodes", + "attributes": {"notes": "this is a test"}, + "start_index": 1, + "src_dim": 1, + } + self.connectivity = Connectivity(**self.kwargs) + + def test_cf_role(self): + self.assertEqual(self.kwargs["cf_role"], self.connectivity.cf_role) + + def test_src_location(self): + expected = self.kwargs["cf_role"].split("_")[0] + self.assertEqual(expected, self.connectivity.src_location) + + def test_tgt_location(self): + expected = self.kwargs["cf_role"].split("_")[1] + self.assertEqual(expected, self.connectivity.tgt_location) + + def test_start_index(self): + self.assertEqual( + self.kwargs["start_index"], self.connectivity.start_index + ) + + def test_src_dim(self): + self.assertEqual(self.kwargs["src_dim"], self.connectivity.src_dim) + + def test_indices(self): + self.assertArrayEqual( + self.kwargs["indices"], self.connectivity.indices + ) + + def test_read_only(self): + attributes = ("indices", "cf_role", "start_index", "src_dim") + for attribute in attributes: + self.assertRaisesRegex( + AttributeError, + "can't set attribute", + setattr, + self.connectivity, + attribute, + 1, + ) + + def test_transpose(self): + expected_dim = 1 - self.kwargs["src_dim"] + expected_indices = self.kwargs["indices"].transpose() + new_connectivity = 
self.connectivity.transpose() + self.assertEqual(expected_dim, new_connectivity.src_dim) + self.assertArrayEqual(expected_indices, new_connectivity.indices) + + def test_lazy_indices(self): + self.assertTrue(is_lazy_data(self.connectivity.lazy_indices())) + + def test_core_indices(self): + self.assertArrayEqual( + self.kwargs["indices"], self.connectivity.core_indices() + ) + + def test_has_lazy_indices(self): + self.assertFalse(self.connectivity.has_lazy_indices()) + + def test_lazy_src_lengths(self): + self.assertTrue(is_lazy_data(self.connectivity.lazy_src_lengths())) + + def test_src_lengths(self): + expected = [3, 3, 3] + self.assertArrayEqual(expected, self.connectivity.src_lengths()) + + def test___str__(self): + expected = ( + "Connectivity(cf_role='face_node_connectivity', start_index=1)" + ) + self.assertEqual(expected, self.connectivity.__str__()) + + def test___repr__(self): + expected = ( + "Connectivity(array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]), " + "cf_role='face_node_connectivity', long_name='my_face_nodes', " + "var_name='face_nodes', attributes={'notes': 'this is a test'}, " + "start_index=1, src_dim=1)" + ) + self.assertEqual(expected, self.connectivity.__repr__()) + + def test_xml_element(self): + doc = minidom.Document() + connectivity_element = self.connectivity.xml_element(doc) + self.assertEqual(connectivity_element.tagName, "connectivity") + for attribute in ("cf_role", "start_index", "src_dim"): + self.assertIn(attribute, connectivity_element.attributes) + + def test___eq__(self): + equivalent_kwargs = self.kwargs + equivalent_kwargs["indices"] = self.kwargs["indices"].transpose() + equivalent_kwargs["src_dim"] = 1 - self.kwargs["src_dim"] + equivalent = Connectivity(**equivalent_kwargs) + self.assertFalse( + (equivalent.indices == self.connectivity.indices).all() + ) + self.assertEqual(equivalent, self.connectivity) + + def test_different(self): + different_kwargs = self.kwargs + different_kwargs["indices"] = 
self.kwargs["indices"].transpose() + different = Connectivity(**different_kwargs) + self.assertNotEqual(different, self.connectivity) + + def test_no_cube_dims(self): + self.assertRaises(NotImplementedError, self.connectivity.cube_dims, 1) + + def test_shape(self): + self.assertEqual(self.kwargs["indices"].shape, self.connectivity.shape) + + def test_ndim(self): + self.assertEqual(self.kwargs["indices"].ndim, self.connectivity.ndim) + + def test___getitem_(self): + subset = self.connectivity[:, 0:1] + self.assertArrayEqual(self.kwargs["indices"][:, 0:1], subset.indices) + + def test_copy(self): + new_indices = np.linspace(11, 16, 6, dtype=int).reshape((3, -1)) + copy_connectivity = self.connectivity.copy(new_indices) + self.assertArrayEqual(new_indices, copy_connectivity.indices) + + def test_indices_by_src(self): + expected = self.kwargs["indices"].transpose() + self.assertArrayEqual(expected, self.connectivity.indices_by_src()) + + def test_indices_by_src_input(self): + expected = as_lazy_data(self.kwargs["indices"].transpose()) + by_src = self.connectivity.indices_by_src( + self.connectivity.lazy_indices() + ) + self.assertArrayEqual(expected, by_src) + + +class TestAltIndices(tests.IrisTest): + def setUp(self): + mask = ([0, 0, 0, 0, 1] * 2) + [0, 0, 0, 1, 1] + data = np.linspace(1, 15, 15, dtype=int).reshape((-1, 5)) + self.masked_indices = ma.array(data=data, mask=mask) + self.lazy_indices = as_lazy_data(data) + + def common(self, indices): + connectivity = Connectivity( + indices=indices, cf_role="face_node_connectivity" + ) + self.assertArrayEqual(indices, connectivity.indices) + + def test_int32(self): + indices = np.linspace(1, 9, 9, dtype=np.int32).reshape((-1, 3)) + self.common(indices) + + def test_uint32(self): + indices = np.linspace(1, 9, 9, dtype=np.uint32).reshape((-1, 3)) + self.common(indices) + + def test_lazy(self): + self.common(self.lazy_indices) + + def test_masked(self): + self.common(self.masked_indices) + + def test_masked_lazy(self): + 
self.common(as_lazy_data(self.masked_indices)) + + def test_has_lazy_indices(self): + connectivity = Connectivity( + indices=self.lazy_indices, cf_role="face_node_connectivity" + ) + self.assertTrue(connectivity.has_lazy_indices()) + + +class TestValidations(tests.IrisTest): + def test_start_index(self): + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), + "cf_role": "face_node_connectivity", + "start_index": 2, + } + self.assertRaisesRegex( + ValueError, "Invalid start_index .", Connectivity, **kwargs + ) + + def test_src_dim(self): + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), + "cf_role": "face_node_connectivity", + "src_dim": 2, + } + self.assertRaisesRegex( + ValueError, "Invalid src_dim .", Connectivity, **kwargs + ) + + def test_cf_role(self): + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), + "cf_role": "error", + } + self.assertRaisesRegex( + ValueError, "Invalid cf_role .", Connectivity, **kwargs + ) + + def test_indices_int(self): + kwargs = { + "indices": np.linspace(1, 9, 9).reshape((-1, 3)), + "cf_role": "face_node_connectivity", + } + self.assertRaisesRegex( + ValueError, + "dtype must be numpy integer subtype", + Connectivity, + **kwargs, + ) + + def test_indices_start_index(self): + kwargs = { + "indices": np.linspace(-9, -1, 9, dtype=int).reshape((-1, 3)), + "cf_role": "face_node_connectivity", + } + self.assertRaisesRegex( + ValueError, " < start_index", Connectivity, **kwargs + ) + + def test_indices_dims_low(self): + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int), + "cf_role": "face_node_connectivity", + } + self.assertRaisesRegex( + ValueError, "Expected 2-dimensional shape,", Connectivity, **kwargs + ) + + def test_indices_dims_high(self): + kwargs = { + "indices": np.linspace(1, 12, 12, dtype=int).reshape((-1, 3, 2)), + "cf_role": "face_node_connectivity", + } + self.assertRaisesRegex( + ValueError, "Expected 2-dimensional shape,", Connectivity, 
**kwargs + ) + + def test_indices_locations_edge(self): + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), + "cf_role": "edge_node_connectivity", + } + self.assertRaisesRegex( + ValueError, + "Not all src_locations meet requirement: len=2", + Connectivity, + **kwargs, + ) + + def test_indices_locations_face(self): + kwargs = { + "indices": np.linspace(1, 6, 6, dtype=int).reshape((-1, 2)), + "cf_role": "face_node_connectivity", + } + self.assertRaisesRegex( + ValueError, + "Not all src_locations meet requirement: len>=3", + Connectivity, + **kwargs, + ) + + def test_indices_locations_volume_face(self): + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), + "cf_role": "volume_face_connectivity", + } + self.assertRaisesRegex( + ValueError, + "Not all src_locations meet requirement: len>=4", + Connectivity, + **kwargs, + ) + + def test_indices_locations_volume_edge(self): + kwargs = { + "indices": np.linspace(1, 12, 12, dtype=int).reshape((-1, 3)), + "cf_role": "volume_edge_connectivity", + } + self.assertRaisesRegex( + ValueError, + "Not all src_locations meet requirement: len>=6", + Connectivity, + **kwargs, + ) + + def test_indices_locations_alt_dim(self): + """The transposed equivalent of `test_indices_locations_volume_face`.""" + kwargs = { + "indices": np.linspace(1, 9, 9, dtype=int).reshape((3, -1)), + "cf_role": "volume_face_connectivity", + "src_dim": 1, + } + self.assertRaisesRegex( + ValueError, + "Not all src_locations meet requirement: len>=4", + Connectivity, + **kwargs, + ) + + def test_indices_locations_masked(self): + mask = ([0, 0, 0] * 2) + [0, 0, 1] + data = np.linspace(1, 9, 9, dtype=int).reshape((3, -1)) + kwargs = { + "indices": ma.array(data=data, mask=mask), + "cf_role": "face_node_connectivity", + } + # Validation of individual location sizes (denoted by masks) only + # available through explicit call of Connectivity.validate_indices(). 
+ connectivity = Connectivity(**kwargs) + self.assertRaisesRegex( + ValueError, + "Not all src_locations meet requirement: len>=3", + connectivity.validate_indices, + ) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/test_ConnectivityMetadata.py new file mode 100644 index 0000000000..423ead9a2e --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_ConnectivityMetadata.py @@ -0,0 +1,775 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.ConnectivityMetadata`. + +""" + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata +from iris.experimental.ugrid import ConnectivityMetadata + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.cf_role = mock.sentinel.cf_role + self.start_index = mock.sentinel.start_index + self.src_dim = mock.sentinel.src_dim + self.cls = ConnectivityMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + cf_role=self.cf_role, + start_index=self.start_index, + src_dim=self.src_dim, + ) + fmt = ( + "ConnectivityMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r}, cf_role={!r}, " + "start_index={!r}, src_dim={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.cf_role, + self.start_index, + self.src_dim, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + "cf_role", + "start_index", + "src_dim", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test__eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + cf_role=sentinel.cf_role, + start_index=sentinel.start_index, + src_dim=sentinel.src_dim, + ) + self.dummy = sentinel.dummy + self.cls = ConnectivityMetadata + # The "src_dim" member is stateful only, and does not participate in + # lenient/strict equivalence. 
+ self.members_no_src_dim = filter( + lambda member: member != "src_dim", self.cls._members + ) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.members_no_src_dim: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_src_dim_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + 
right["src_dim"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_members(self): + for member in self.members_no_src_dim: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_src_dim(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["src_dim"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_members(self): + for member in 
self.members_no_src_dim: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_src_dim(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["src_dim"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_members_none(self): + for member in self.members_no_src_dim: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_src_dim_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["src_dim"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = ConnectivityMetadata + self.one = self.cls(1, 1, 1, 1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1, 1, 1, 1) + self.none = self.cls(1, 1, 1, None, 1, 1, 1, 1) + 
self.attributes = self.cls(1, 1, 1, 1, 10, 1, 1, 1) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + cf_role=sentinel.cf_role, + start_index=sentinel.start_index, + src_dim=sentinel.src_dim, + ) + self.dummy = sentinel.dummy + self.cls = ConnectivityMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__ + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", 
return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_members(self): + 
for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with 
mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + cf_role=sentinel.cf_role, + start_index=sentinel.start_index, + src_dim=sentinel.src_dim, + ) + self.dummy = sentinel.dummy + self.cls = ConnectivityMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__ + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = 
sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + member_value = getattr(lmetadata, member) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (member_value, None) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = (None, member_value) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = 
(left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members(self): + for member 
in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = ConnectivityMetadata + self.none = 
self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py new file mode 100644 index 0000000000..7a0425a6b6 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_Mesh.py @@ -0,0 +1,1139 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+"""Unit tests for the :class:`iris.experimental.ugrid.Mesh` class.""" + +import numpy as np + +from iris.coords import AuxCoord +from iris.exceptions import ConnectivityNotFoundError, CoordinateNotFoundError +from iris.experimental import ugrid + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + + +class TestMeshCommon(tests.IrisTest): + @classmethod + def setUpClass(cls): + # A collection of minimal coords and connectivities describing an + # equilateral triangle. + cls.NODE_LON = AuxCoord( + [0, 2, 1], + standard_name="longitude", + long_name="long_name", + var_name="node_lon", + attributes={"test": 1}, + ) + cls.NODE_LAT = AuxCoord( + [0, 0, 1], standard_name="latitude", var_name="node_lat" + ) + cls.EDGE_LON = AuxCoord( + [1, 1.5, 0.5], standard_name="longitude", var_name="edge_lon" + ) + cls.EDGE_LAT = AuxCoord( + [0, 0.5, 0.5], standard_name="latitude", var_name="edge_lat" + ) + cls.FACE_LON = AuxCoord( + [0.5], standard_name="longitude", var_name="face_lon" + ) + cls.FACE_LAT = AuxCoord( + [0.5], standard_name="latitude", var_name="face_lat" + ) + + cls.EDGE_NODE = ugrid.Connectivity( + [[0, 1], [1, 2], [2, 0]], + cf_role="edge_node_connectivity", + long_name="long_name", + var_name="var_name", + attributes={"test": 1}, + ) + cls.FACE_NODE = ugrid.Connectivity( + [[0, 1, 2]], cf_role="face_node_connectivity" + ) + cls.FACE_EDGE = ugrid.Connectivity( + [[0, 1, 2]], cf_role="face_edge_connectivity" + ) + # (Actually meaningless:) + cls.FACE_FACE = ugrid.Connectivity( + [[0, 0, 0]], cf_role="face_face_connectivity" + ) + # (Actually meaningless:) + cls.EDGE_FACE = ugrid.Connectivity( + [[0, 0], [0, 0], [0, 0]], cf_role="edge_face_connectivity" + ) + cls.BOUNDARY_NODE = ugrid.Connectivity( + [[0, 1], [1, 2], [2, 0]], cf_role="boundary_node_connectivity" + ) + + +class TestProperties1D(TestMeshCommon): + # Tests that can re-use a single instance for greater efficiency. 
+ @classmethod + def setUpClass(cls): + super().setUpClass() + # Mesh kwargs with topology_dimension=1 and all applicable + # arguments populated - this tests correct property setting. + cls.kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ((cls.NODE_LON, "x"), (cls.NODE_LAT, "y")), + "connectivities": cls.EDGE_NODE, + "long_name": "my_topology_mesh", + "var_name": "mesh", + "attributes": {"notes": "this is a test"}, + "node_dimension": "NodeDim", + "edge_dimension": "EdgeDim", + "edge_coords_and_axes": ((cls.EDGE_LON, "x"), (cls.EDGE_LAT, "y")), + } + cls.mesh = ugrid.Mesh(**cls.kwargs) + + def test__metadata_manager(self): + self.assertEqual( + self.mesh._metadata_manager.cls.__name__, + ugrid.MeshMetadata.__name__, + ) + + def test___getstate__(self): + expected = ( + self.mesh._metadata_manager, + self.mesh._coord_manager, + self.mesh._connectivity_manager, + ) + self.assertEqual(expected, self.mesh.__getstate__()) + + def test___repr__(self): + expected = ( + "Mesh(topology_dimension=1, node_coords_and_axes=[(AuxCoord(" + "array([0, 2, 1]), standard_name='longitude', units=Unit(" + "'unknown'), long_name='long_name', var_name='node_lon', " + "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='node_lat'), 'y')], connectivities=Connectivity(" + "cf_role='edge_node_connectivity', start_index=0), " + "edge_coords_and_axes=[(AuxCoord(array([1. , 1.5, 0.5]), " + "standard_name='longitude', units=Unit('unknown'), " + "var_name='edge_lon'), 'x'), (AuxCoord(array([0. 
, 0.5, 0.5]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='edge_lat'), 'y')], long_name='my_topology_mesh', " + "var_name='mesh', attributes={'notes': 'this is a test'}, " + "node_dimension='NodeDim', edge_dimension='EdgeDim')" + ) + self.assertEqual(expected, self.mesh.__repr__()) + + def test_all_connectivities(self): + expected = ugrid.Mesh1DConnectivities(self.EDGE_NODE) + self.assertEqual(expected, self.mesh.all_connectivities) + + def test_all_coords(self): + expected = ugrid.Mesh1DCoords( + self.NODE_LON, self.NODE_LAT, self.EDGE_LON, self.EDGE_LAT + ) + self.assertEqual(expected, self.mesh.all_coords) + + def test_boundary_node(self): + with self.assertRaises(AttributeError): + _ = self.mesh.boundary_node_connectivity + + def test_cf_role(self): + self.assertEqual("mesh_topology", self.mesh.cf_role) + # Read only. + self.assertRaises(AttributeError, setattr, self.mesh.cf_role, "foo", 1) + + def test_connectivities(self): + # General results. Method intended for inheritance. + positive_kwargs = ( + {"item": self.EDGE_NODE}, + {"item": "long_name"}, + {"long_name": "long_name"}, + {"var_name": "var_name"}, + {"attributes": {"test": 1}}, + {"cf_role": "edge_node_connectivity"}, + ) + + fake_connectivity = tests.mock.Mock( + __class__=ugrid.Connectivity, cf_role="fake" + ) + negative_kwargs = ( + {"item": fake_connectivity}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + {"cf_role": "foo"}, + ) + + func = self.mesh.connectivities + for kwargs in positive_kwargs: + self.assertEqual([self.EDGE_NODE], func(**kwargs)) + for kwargs in negative_kwargs: + self.assertEqual([], func(**kwargs)) + + def test_connectivities_locations(self): + # topology_dimension-specific results. Method intended to be overridden. 
+ positive_kwargs = ( + {"contains_node": True}, + {"contains_edge": True}, + {"contains_node": True, "contains_edge": True}, + ) + negative_kwargs = ( + {"contains_node": False}, + {"contains_edge": False}, + {"contains_edge": True, "contains_node": False}, + {"contains_edge": False, "contains_node": False}, + ) + + func = self.mesh.connectivities + for kwargs in positive_kwargs: + self.assertEqual([self.EDGE_NODE], func(**kwargs)) + for kwargs in negative_kwargs: + self.assertEqual([], func(**kwargs)) + + log_regex = r".*filter for non-existent.*" + with self.assertLogs(ugrid.logger, level="DEBUG", msg_regex=log_regex): + self.assertEqual([], func(contains_face=True)) + + def test_coord(self): + # See Mesh.coords tests for thorough coverage of cases. + func = self.mesh.coord + exception = CoordinateNotFoundError + self.assertRaisesRegex( + exception, ".*but found 2", func, include_nodes=True + ) + self.assertRaisesRegex(exception, ".*but found none", func, axis="t") + + def test_coords(self): + # General results. Method intended for inheritance. + positive_kwargs = ( + {"item": self.NODE_LON}, + {"item": "longitude"}, + {"standard_name": "longitude"}, + {"long_name": "long_name"}, + {"var_name": "node_lon"}, + {"attributes": {"test": 1}}, + ) + + fake_coord = AuxCoord([0]) + negative_kwargs = ( + {"item": fake_coord}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + ) + + func = self.mesh.coords + for kwargs in positive_kwargs: + self.assertIn(self.NODE_LON, func(**kwargs)) + for kwargs in negative_kwargs: + self.assertNotIn(self.NODE_LON, func(**kwargs)) + + def test_coords_locations(self): + # topology_dimension-specific results. Method intended to be overridden. 
+ all_expected = { + "node_x": self.NODE_LON, + "node_y": self.NODE_LAT, + "edge_x": self.EDGE_LON, + "edge_y": self.EDGE_LAT, + } + + kwargs_expected = ( + ({"axis": "x"}, ["node_x", "edge_x"]), + ({"axis": "y"}, ["node_y", "edge_y"]), + ({"include_nodes": True}, ["node_x", "node_y"]), + ({"include_edges": True}, ["edge_x", "edge_y"]), + ({"include_nodes": False}, ["edge_x", "edge_y"]), + ({"include_edges": False}, ["node_x", "node_y"]), + ( + {"include_nodes": True, "include_edges": True}, + ["node_x", "node_y", "edge_x", "edge_y"], + ), + ({"include_nodes": False, "include_edges": False}, []), + ( + {"include_nodes": False, "include_edges": True}, + ["edge_x", "edge_y"], + ), + ) + + func = self.mesh.coords + for kwargs, expected in kwargs_expected: + expected = [all_expected[k] for k in expected if k in all_expected] + self.assertEqual(expected, func(**kwargs)) + + log_regex = r".*filter non-existent.*" + with self.assertLogs(ugrid.logger, level="DEBUG", msg_regex=log_regex): + self.assertEqual([], func(include_faces=True)) + + def test_edge_dimension(self): + self.assertEqual( + self.kwargs["edge_dimension"], self.mesh.edge_dimension + ) + + def test_edge_coords(self): + expected = ugrid.MeshEdgeCoords(self.EDGE_LON, self.EDGE_LAT) + self.assertEqual(expected, self.mesh.edge_coords) + + def test_edge_face(self): + with self.assertRaises(AttributeError): + _ = self.mesh.edge_face_connectivity + + def test_edge_node(self): + self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) + + def test_face_coords(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_coords + + def test_face_dimension(self): + self.assertIsNone(self.mesh.face_dimension) + + def test_face_edge(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_edge_connectivity + + def test_face_face(self): + with self.assertRaises(AttributeError): + _ = self.mesh.face_face_connectivity + + def test_face_node(self): + with self.assertRaises(AttributeError): + _ 
= self.mesh.face_node_connectivity + + def test_node_coords(self): + expected = ugrid.MeshNodeCoords(self.NODE_LON, self.NODE_LAT) + self.assertEqual(expected, self.mesh.node_coords) + + def test_node_dimension(self): + self.assertEqual( + self.kwargs["node_dimension"], self.mesh.node_dimension + ) + + def test_topology_dimension(self): + self.assertEqual( + self.kwargs["topology_dimension"], self.mesh.topology_dimension + ) + # Read only. + self.assertRaises( + AttributeError, setattr, self.mesh.topology_dimension, "foo", 1 + ) + + +class TestProperties2D(TestProperties1D): + # Additional/specialised tests for topology_dimension=2. + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.kwargs["topology_dimension"] = 2 + cls.kwargs["connectivities"] = ( + cls.FACE_NODE, + cls.EDGE_NODE, + cls.FACE_EDGE, + cls.FACE_FACE, + cls.EDGE_FACE, + cls.BOUNDARY_NODE, + ) + cls.kwargs["face_dimension"] = "FaceDim" + cls.kwargs["face_coords_and_axes"] = ( + (cls.FACE_LON, "x"), + (cls.FACE_LAT, "y"), + ) + cls.mesh = ugrid.Mesh(**cls.kwargs) + + def test___repr__(self): + expected = ( + "Mesh(topology_dimension=2, node_coords_and_axes=[(AuxCoord(" + "array([0, 2, 1]), standard_name='longitude', units=Unit(" + "'unknown'), long_name='long_name', var_name='node_lon', " + "attributes={'test': 1}), 'x'), (AuxCoord(array([0, 0, 1]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='node_lat'), 'y')], connectivities=[Connectivity(" + "cf_role='face_node_connectivity', start_index=0), Connectivity(" + "cf_role='edge_node_connectivity', start_index=0), Connectivity(" + "cf_role='face_edge_connectivity', start_index=0), Connectivity(" + "cf_role='face_face_connectivity', start_index=0), Connectivity(" + "cf_role='edge_face_connectivity', start_index=0), Connectivity(" + "cf_role='boundary_node_connectivity', start_index=0)], " + "edge_coords_and_axes=[(AuxCoord(array([1. 
, 1.5, 0.5]), " + "standard_name='longitude', units=Unit('unknown'), " + "var_name='edge_lon'), 'x'), (AuxCoord(array([0. , 0.5, 0.5]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='edge_lat'), 'y')], face_coords_and_axes=[(AuxCoord(" + "array([0.5]), standard_name='longitude', units=Unit('unknown'), " + "var_name='face_lon'), 'x'), (AuxCoord(array([0.5]), " + "standard_name='latitude', units=Unit('unknown'), " + "var_name='face_lat'), 'y')], long_name='my_topology_mesh', " + "var_name='mesh', attributes={'notes': 'this is a test'}, " + "node_dimension='NodeDim', edge_dimension='EdgeDim', " + "face_dimension='FaceDim')" + ) + self.assertEqual(expected, self.mesh.__repr__()) + + def test_all_connectivities(self): + expected = ugrid.Mesh2DConnectivities( + self.FACE_NODE, + self.EDGE_NODE, + self.FACE_EDGE, + self.FACE_FACE, + self.EDGE_FACE, + self.BOUNDARY_NODE, + ) + self.assertEqual(expected, self.mesh.all_connectivities) + + def test_all_coords(self): + expected = ugrid.Mesh2DCoords( + self.NODE_LON, + self.NODE_LAT, + self.EDGE_LON, + self.EDGE_LAT, + self.FACE_LON, + self.FACE_LAT, + ) + self.assertEqual(expected, self.mesh.all_coords) + + def test_boundary_node(self): + self.assertEqual( + self.BOUNDARY_NODE, self.mesh.boundary_node_connectivity + ) + + def test_connectivity(self): + # See Mesh.connectivities tests for thorough coverage of cases. + # Can only test Mesh.connectivity for 2D since we need >1 connectivity. 
+ func = self.mesh.connectivity + exception = ConnectivityNotFoundError + self.assertRaisesRegex( + exception, ".*but found 3", func, contains_node=True + ) + self.assertRaisesRegex( + exception, + ".*but found none", + func, + contains_node=False, + contains_edge=False, + contains_face=False, + ) + + def test_connectivities_locations(self): + kwargs_expected = ( + ( + {"contains_node": True}, + [self.EDGE_NODE, self.FACE_NODE, self.BOUNDARY_NODE], + ), + ( + {"contains_edge": True}, + [self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE], + ), + ( + {"contains_face": True}, + [ + self.FACE_NODE, + self.FACE_EDGE, + self.FACE_FACE, + self.EDGE_FACE, + ], + ), + ( + {"contains_node": False}, + [self.FACE_EDGE, self.EDGE_FACE, self.FACE_FACE], + ), + ( + {"contains_edge": False}, + [self.FACE_NODE, self.BOUNDARY_NODE, self.FACE_FACE], + ), + ({"contains_face": False}, [self.EDGE_NODE, self.BOUNDARY_NODE]), + ( + {"contains_edge": True, "contains_face": True}, + [self.FACE_EDGE, self.EDGE_FACE], + ), + ( + {"contains_node": False, "contains_edge": False}, + [self.FACE_FACE], + ), + ( + {"contains_node": True, "contains_edge": False}, + [self.FACE_NODE, self.BOUNDARY_NODE], + ), + ( + { + "contains_node": False, + "contains_edge": False, + "contains_face": False, + }, + [], + ), + ) + func = self.mesh.connectivities + for kwargs, expected in kwargs_expected: + result = func(**kwargs) + self.assertEqual(len(expected), len(result)) + for item in expected: + self.assertIn(item, result) + + def test_coords_locations(self): + all_expected = { + "node_x": self.NODE_LON, + "node_y": self.NODE_LAT, + "edge_x": self.EDGE_LON, + "edge_y": self.EDGE_LAT, + "face_x": self.FACE_LON, + "face_y": self.FACE_LAT, + } + + kwargs_expected = ( + ({"axis": "x"}, ["node_x", "edge_x", "face_x"]), + ({"axis": "y"}, ["node_y", "edge_y", "face_y"]), + ({"include_nodes": True}, ["node_x", "node_y"]), + ({"include_edges": True}, ["edge_x", "edge_y"]), + ( + {"include_nodes": False}, + ["edge_x", 
"edge_y", "face_x", "face_y"], + ), + ( + {"include_edges": False}, + ["node_x", "node_y", "face_x", "face_y"], + ), + ( + {"include_faces": False}, + ["node_x", "node_y", "edge_x", "edge_y"], + ), + ( + {"include_faces": True, "include_edges": True}, + ["edge_x", "edge_y", "face_x", "face_y"], + ), + ( + {"include_faces": False, "include_edges": False}, + ["node_x", "node_y"], + ), + ( + {"include_faces": False, "include_edges": True}, + ["edge_x", "edge_y"], + ), + ) + + func = self.mesh.coords + for kwargs, expected in kwargs_expected: + expected = [all_expected[k] for k in expected if k in all_expected] + self.assertEqual(expected, func(**kwargs)) + + def test_edge_face(self): + self.assertEqual(self.EDGE_FACE, self.mesh.edge_face_connectivity) + + def test_face_coords(self): + expected = ugrid.MeshFaceCoords(self.FACE_LON, self.FACE_LAT) + self.assertEqual(expected, self.mesh.face_coords) + + def test_face_dimension(self): + self.assertEqual( + self.kwargs["face_dimension"], self.mesh.face_dimension + ) + + def test_face_edge(self): + self.assertEqual(self.FACE_EDGE, self.mesh.face_edge_connectivity) + + def test_face_face(self): + self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) + + def test_face_node(self): + self.assertEqual(self.FACE_NODE, self.mesh.face_node_connectivity) + + +class TestOperations1D(TestMeshCommon): + # Tests that cannot re-use an existing Mesh instance, instead need a new + # one each time. + def setUp(self): + self.mesh = ugrid.Mesh( + topology_dimension=1, + node_coords_and_axes=((self.NODE_LON, "x"), (self.NODE_LAT, "y")), + connectivities=self.EDGE_NODE, + ) + + @staticmethod + def new_connectivity(connectivity, new_len=False): + """Provide a new connectivity recognisably different from the original.""" + # NOTE: assumes non-transposed connectivity (src_dim=0). 
+ if new_len: + shape = (connectivity.shape[0] + 1, connectivity.shape[1]) + else: + shape = connectivity.shape + return connectivity.copy(np.zeros(shape, dtype=int)) + + @staticmethod + def new_coord(coord, new_shape=False): + """Provide a new coordinate recognisably different from the original.""" + if new_shape: + shape = tuple([i + 1 for i in coord.shape]) + else: + shape = coord.shape + return coord.copy(np.zeros(shape)) + + def test___setstate__(self): + false_metadata_manager = "foo" + false_coord_manager = "bar" + false_connectivity_manager = "baz" + self.mesh.__setstate__( + ( + false_metadata_manager, + false_coord_manager, + false_connectivity_manager, + ) + ) + + self.assertEqual(false_metadata_manager, self.mesh._metadata_manager) + self.assertEqual(false_coord_manager, self.mesh._coord_manager) + self.assertEqual( + false_connectivity_manager, self.mesh._connectivity_manager + ) + + def test_add_connectivities(self): + # Cannot test ADD - 1D - nothing extra to add beyond minimum. + + for new_len in (False, True): + # REPLACE connectivities, first with one of the same length, then + # with one of different length. 
+ edge_node = self.new_connectivity(self.EDGE_NODE, new_len) + self.mesh.add_connectivities(edge_node) + self.assertEqual( + ugrid.Mesh1DConnectivities(edge_node), + self.mesh.all_connectivities, + ) + + def test_add_connectivities_duplicates(self): + edge_node_one = self.EDGE_NODE + edge_node_two = self.new_connectivity(self.EDGE_NODE) + self.mesh.add_connectivities(edge_node_one, edge_node_two) + self.assertEqual( + edge_node_two, + self.mesh.edge_node_connectivity, + ) + + def test_add_connectivities_invalid(self): + self.assertRaisesRegex( + TypeError, + "Expected Connectivity.*", + self.mesh.add_connectivities, + "foo", + ) + + face_node = self.FACE_NODE + log_regex = r"Not adding connectivity.*" + with self.assertLogs(ugrid.logger, level="DEBUG", msg_regex=log_regex): + self.mesh.add_connectivities(face_node) + + def test_add_coords(self): + # ADD coords. + edge_kwargs = {"edge_x": self.EDGE_LON, "edge_y": self.EDGE_LAT} + self.mesh.add_coords(**edge_kwargs) + self.assertEqual( + ugrid.MeshEdgeCoords(**edge_kwargs), self.mesh.edge_coords + ) + + for new_shape in (False, True): + # REPLACE coords, first with ones of the same shape, then with ones + # of different shape. 
+ node_kwargs = { + "node_x": self.new_coord(self.NODE_LON, new_shape), + "node_y": self.new_coord(self.NODE_LAT, new_shape), + } + edge_kwargs = { + "edge_x": self.new_coord(self.EDGE_LON, new_shape), + "edge_y": self.new_coord(self.EDGE_LAT, new_shape), + } + self.mesh.add_coords(**node_kwargs, **edge_kwargs) + self.assertEqual( + ugrid.MeshNodeCoords(**node_kwargs), self.mesh.node_coords + ) + self.assertEqual( + ugrid.MeshEdgeCoords(**edge_kwargs), self.mesh.edge_coords + ) + + def test_add_coords_face(self): + self.assertRaises( + TypeError, + self.mesh.add_coords, + face_x=self.FACE_LON, + face_y=self.FACE_LAT, + ) + + def test_add_coords_invalid(self): + func = self.mesh.add_coords + self.assertRaisesRegex( + TypeError, ".*requires to be an 'AuxCoord'.*", func, node_x="foo" + ) + self.assertRaisesRegex( + TypeError, ".*requires a x-axis like.*", func, node_x=self.NODE_LAT + ) + climatological = AuxCoord( + [0], + bounds=[-1, 1], + standard_name="longitude", + climatological=True, + units="Days since 1970", + ) + self.assertRaisesRegex( + TypeError, + ".*cannot be a climatological.*", + func, + node_x=climatological, + ) + wrong_shape = self.NODE_LON.copy([0]) + self.assertRaisesRegex( + ValueError, ".*requires to have shape.*", func, node_x=wrong_shape + ) + + def test_add_coords_single(self): + # ADD coord. + edge_x = self.EDGE_LON + expected = ugrid.MeshEdgeCoords(edge_x=edge_x, edge_y=None) + self.mesh.add_coords(edge_x=edge_x) + self.assertEqual(expected, self.mesh.edge_coords) + + # REPLACE coords. 
+ node_x = self.new_coord(self.NODE_LON) + edge_x = self.new_coord(self.EDGE_LON) + expected_nodes = ugrid.MeshNodeCoords( + node_x=node_x, node_y=self.mesh.node_coords.node_y + ) + expected_edges = ugrid.MeshEdgeCoords(edge_x=edge_x, edge_y=None) + self.mesh.add_coords(node_x=node_x, edge_x=edge_x) + self.assertEqual(expected_nodes, self.mesh.node_coords) + self.assertEqual(expected_edges, self.mesh.edge_coords) + + # Attempt to REPLACE coords with those of DIFFERENT SHAPE. + node_x = self.new_coord(self.NODE_LON, new_shape=True) + edge_x = self.new_coord(self.EDGE_LON, new_shape=True) + node_kwarg = {"node_x": node_x} + edge_kwarg = {"edge_x": edge_x} + both_kwargs = dict(**node_kwarg, **edge_kwarg) + for kwargs in (node_kwarg, edge_kwarg, both_kwargs): + self.assertRaisesRegex( + ValueError, + ".*requires to have shape.*", + self.mesh.add_coords, + **kwargs, + ) + + def test_add_coords_single_face(self): + self.assertRaises( + TypeError, self.mesh.add_coords, face_x=self.FACE_LON + ) + + def test_dimension_names(self): + # Test defaults. + default = ugrid.Mesh1DNames("Mesh1d_node", "Mesh1d_edge") + self.assertEqual(default, self.mesh.dimension_names()) + + log_regex = r"Not setting face_dimension.*" + with self.assertLogs(ugrid.logger, level="DEBUG", msg_regex=log_regex): + self.mesh.dimension_names("foo", "bar", "baz") + self.assertEqual( + ugrid.Mesh1DNames("foo", "bar"), self.mesh.dimension_names() + ) + + self.mesh.dimension_names_reset(True, True, True) + self.assertEqual(default, self.mesh.dimension_names()) + + # Single. 
+ self.mesh.dimension_names(edge="foo") + self.assertEqual("foo", self.mesh.edge_dimension) + self.mesh.dimension_names_reset(edge=True) + self.assertEqual(default, self.mesh.dimension_names()) + + def test_edge_dimension_set(self): + self.mesh.edge_dimension = "foo" + self.assertEqual("foo", self.mesh.edge_dimension) + + def test_face_dimension_set(self): + log_regex = r"Not setting face_dimension.*" + with self.assertLogs(ugrid.logger, level="DEBUG", msg_regex=log_regex): + self.mesh.face_dimension = "foo" + self.assertIsNone(self.mesh.face_dimension) + + def test_node_dimension_set(self): + self.mesh.node_dimension = "foo" + self.assertEqual("foo", self.mesh.node_dimension) + + def test_remove_connectivities(self): + """ + Test that remove() mimics the connectivities() method correctly, + and prevents removal of mandatory connectivities. + + """ + positive_kwargs = ( + {"item": self.EDGE_NODE}, + {"item": "long_name"}, + {"long_name": "long_name"}, + {"var_name": "var_name"}, + {"attributes": {"test": 1}}, + {"cf_role": "edge_node_connectivity"}, + {"contains_node": True}, + {"contains_edge": True}, + {"contains_edge": True, "contains_node": True}, + ) + + fake_connectivity = tests.mock.Mock( + __class__=ugrid.Connectivity, cf_role="fake" + ) + negative_kwargs = ( + {"item": fake_connectivity}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + {"cf_role": "foo"}, + {"contains_node": False}, + {"contains_edge": False}, + {"contains_edge": True, "contains_node": False}, + {"contains_edge": False, "contains_node": False}, + ) + + log_regex = r"Ignoring request to remove.*" + for kwargs in positive_kwargs: + with self.assertLogs( + ugrid.logger, level="DEBUG", msg_regex=log_regex + ): + self.mesh.remove_connectivities(**kwargs) + self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) + for kwargs in negative_kwargs: + with self.assertLogs(ugrid.logger, level="DEBUG") 
as log: + # Check that the only debug log is the one we inserted. + ugrid.logger.debug("foo", extra=dict(cls=None)) + self.mesh.remove_coords(**kwargs) + self.assertEqual(1, len(log.records)) + self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) + + def test_remove_coords(self): + # Test that remove() mimics the coords() method correctly, + # and prevents removal of mandatory coords. + positive_kwargs = ( + {"item": self.NODE_LON}, + {"item": "longitude"}, + {"standard_name": "longitude"}, + {"long_name": "long_name"}, + {"var_name": "node_lon"}, + {"attributes": {"test": 1}}, + ) + + fake_coord = AuxCoord([0]) + negative_kwargs = ( + {"item": fake_coord}, + {"item": "foo"}, + {"standard_name": "air_temperature"}, + {"long_name": "foo"}, + {"var_name": "foo"}, + {"attributes": {"test": 2}}, + ) + + log_regex = r"Ignoring request to remove.*" + for kwargs in positive_kwargs: + with self.assertLogs( + ugrid.logger, level="DEBUG", msg_regex=log_regex + ): + self.mesh.remove_coords(**kwargs) + self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) + for kwargs in negative_kwargs: + with self.assertLogs(ugrid.logger, level="DEBUG") as log: + # Check that the only debug log is the one we inserted. + ugrid.logger.debug("foo", extra=dict(cls=None)) + self.mesh.remove_coords(**kwargs) + self.assertEqual(1, len(log.records)) + self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) + + # Test removal of an optional coord. + self.mesh.add_coords(edge_x=self.EDGE_LON) + # Attempt to remove a non-existent coord. + self.mesh.remove_coords(self.EDGE_LAT) + # Confirm that EDGE_LON is still there. + self.assertEqual(self.EDGE_LON, self.mesh.edge_coords.edge_x) + # Remove EDGE_LON and confirm success.
+ self.mesh.remove_coords(self.EDGE_LON) + self.assertEqual(None, self.mesh.edge_coords.edge_x) + + def test_to_MeshCoord(self): + location = "node" + axis = "x" + result = self.mesh.to_MeshCoord(location, axis) + self.assertIsInstance(result, ugrid.MeshCoord) + self.assertEqual(location, result.location) + self.assertEqual(axis, result.axis) + + def test_to_MeshCoord_face(self): + location = "face" + axis = "x" + self.assertRaises( + CoordinateNotFoundError, self.mesh.to_MeshCoord, location, axis + ) + + def test_to_MeshCoords(self): + location = "node" + result = self.mesh.to_MeshCoords(location) + self.assertEqual(len(self.mesh.AXES), len(result)) + for ix, axis in enumerate(self.mesh.AXES): + coord = result[ix] + self.assertIsInstance(coord, ugrid.MeshCoord) + self.assertEqual(location, coord.location) + self.assertEqual(axis, coord.axis) + + def test_to_MeshCoords_face(self): + location = "face" + self.assertRaises( + CoordinateNotFoundError, self.mesh.to_MeshCoords, location + ) + + +class TestOperations2D(TestOperations1D): + # Additional/specialised tests for topology_dimension=2. + def setUp(self): + self.mesh = ugrid.Mesh( + topology_dimension=2, + node_coords_and_axes=((self.NODE_LON, "x"), (self.NODE_LAT, "y")), + connectivities=(self.FACE_NODE), + ) + + def test_add_connectivities(self): + # ADD connectivities. + kwargs = { + "edge_node": self.EDGE_NODE, + "face_edge": self.FACE_EDGE, + "face_face": self.FACE_FACE, + "edge_face": self.EDGE_FACE, + "boundary_node": self.BOUNDARY_NODE, + } + expected = ugrid.Mesh2DConnectivities( + face_node=self.mesh.face_node_connectivity, **kwargs + ) + self.mesh.add_connectivities(*kwargs.values()) + self.assertEqual(expected, self.mesh.all_connectivities) + + # REPLACE connectivities. + kwargs["face_node"] = self.FACE_NODE + for new_len in (False, True): + # First replace with ones of same length, then with ones of + # different length. 
+ kwargs = { + k: self.new_connectivity(v, new_len) for k, v in kwargs.items() + } + self.mesh.add_connectivities(*kwargs.values()) + self.assertEqual( + ugrid.Mesh2DConnectivities(**kwargs), + self.mesh.all_connectivities, + ) + + def test_add_connectivities_inconsistent(self): + # ADD Connectivities. + self.mesh.add_connectivities(self.EDGE_NODE) + face_edge = self.new_connectivity(self.FACE_EDGE, new_len=True) + edge_face = self.new_connectivity(self.EDGE_FACE, new_len=True) + for args in ([face_edge], [edge_face], [face_edge, edge_face]): + self.assertRaisesRegex( + ValueError, + "inconsistent .* counts.", + self.mesh.add_connectivities, + *args, + ) + + # REPLACE Connectivities + self.mesh.add_connectivities(self.FACE_EDGE, self.EDGE_FACE) + for args in ([face_edge], [edge_face], [face_edge, edge_face]): + self.assertRaisesRegex( + ValueError, + "inconsistent .* counts.", + self.mesh.add_connectivities, + *args, + ) + + def test_add_connectivities_invalid(self): + fake_cf_role = tests.mock.Mock( + __class__=ugrid.Connectivity, cf_role="foo" + ) + log_regex = r"Not adding connectivity.*" + with self.assertLogs(ugrid.logger, level="DEBUG", msg_regex=log_regex): + self.mesh.add_connectivities(fake_cf_role) + + def test_add_coords_face(self): + # ADD coords. + kwargs = {"face_x": self.FACE_LON, "face_y": self.FACE_LAT} + self.mesh.add_coords(**kwargs) + self.assertEqual(ugrid.MeshFaceCoords(**kwargs), self.mesh.face_coords) + + for new_shape in (False, True): + # REPLACE coords, first with ones of the same shape, then with ones + # of different shape. + kwargs = { + "face_x": self.new_coord(self.FACE_LON, new_shape), + "face_y": self.new_coord(self.FACE_LAT, new_shape), + } + self.mesh.add_coords(**kwargs) + self.assertEqual( + ugrid.MeshFaceCoords(**kwargs), self.mesh.face_coords + ) + + def test_add_coords_single_face(self): + # ADD coord. 
+ face_x = self.FACE_LON + expected = ugrid.MeshFaceCoords(face_x=face_x, face_y=None) + self.mesh.add_coords(face_x=face_x) + self.assertEqual(expected, self.mesh.face_coords) + + # REPLACE coord. + face_x = self.new_coord(self.FACE_LON) + expected = ugrid.MeshFaceCoords(face_x=face_x, face_y=None) + self.mesh.add_coords(face_x=face_x) + self.assertEqual(expected, self.mesh.face_coords) + + # Attempt to REPLACE coord with that of DIFFERENT SHAPE. + face_x = self.new_coord(self.FACE_LON, new_shape=True) + self.assertRaisesRegex( + ValueError, + ".*requires to have shape.*", + self.mesh.add_coords, + face_x=face_x, + ) + + def test_dimension_names(self): + # Test defaults. + default = ugrid.Mesh2DNames( + "Mesh2d_node", "Mesh2d_edge", "Mesh2d_face" + ) + self.assertEqual(default, self.mesh.dimension_names()) + + self.mesh.dimension_names("foo", "bar", "baz") + self.assertEqual( + ugrid.Mesh2DNames("foo", "bar", "baz"), self.mesh.dimension_names() + ) + + self.mesh.dimension_names_reset(True, True, True) + self.assertEqual(default, self.mesh.dimension_names()) + + # Single. + self.mesh.dimension_names(face="foo") + self.assertEqual("foo", self.mesh.face_dimension) + self.mesh.dimension_names_reset(face=True) + self.assertEqual(default, self.mesh.dimension_names()) + + def test_face_dimension_set(self): + self.mesh.face_dimension = "foo" + self.assertEqual("foo", self.mesh.face_dimension) + + def test_remove_connectivities(self): + """Do what 1D test could not - test removal of optional connectivity.""" + + # Add an optional connectivity. + self.mesh.add_connectivities(self.FACE_FACE) + # Attempt to remove a non-existent connectivity. + self.mesh.remove_connectivities(self.EDGE_NODE) + # Confirm that FACE_FACE is still there. + self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) + # Remove FACE_FACE and confirm success. 
+ self.mesh.remove_connectivities(contains_face=True) + self.assertEqual(None, self.mesh.face_face_connectivity) + + def test_remove_coords(self): + """Test the face argument.""" + super().test_remove_coords() + self.mesh.add_coords(face_x=self.FACE_LON) + self.assertEqual(self.FACE_LON, self.mesh.face_coords.face_x) + self.mesh.remove_coords(include_faces=True) + self.assertEqual(None, self.mesh.face_coords.face_x) + + def test_to_MeshCoord_face(self): + self.mesh.add_coords(face_x=self.FACE_LON) + location = "face" + axis = "x" + result = self.mesh.to_MeshCoord(location, axis) + self.assertIsInstance(result, ugrid.MeshCoord) + self.assertEqual(location, result.location) + self.assertEqual(axis, result.axis) + + def test_to_MeshCoords_face(self): + self.mesh.add_coords(face_x=self.FACE_LON, face_y=self.FACE_LAT) + location = "face" + result = self.mesh.to_MeshCoords(location) + self.assertEqual(len(self.mesh.AXES), len(result)) + for ix, axis in enumerate(self.mesh.AXES): + coord = result[ix] + self.assertIsInstance(coord, ugrid.MeshCoord) + self.assertEqual(location, coord.location) + self.assertEqual(axis, coord.axis) + + +class InitValidation(TestMeshCommon): + def test_invalid_topology(self): + kwargs = { + "topology_dimension": 0, + "node_coords_and_axes": ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ), + "connectivities": self.EDGE_NODE, + } + self.assertRaisesRegex( + ValueError, "Expected 'topology_dimension'.*", ugrid.Mesh, **kwargs + ) + + def test_invalid_axes(self): + kwargs = { + "topology_dimension": 2, + "connectivities": self.FACE_NODE, + } + self.assertRaisesRegex( + ValueError, + "Invalid axis specified for node.*", + ugrid.Mesh, + node_coords_and_axes=( + (self.NODE_LON, "foo"), + (self.NODE_LAT, "y"), + ), + **kwargs, + ) + kwargs["node_coords_and_axes"] = ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ) + self.assertRaisesRegex( + ValueError, + "Invalid axis specified for edge.*", + ugrid.Mesh, + 
edge_coords_and_axes=((self.EDGE_LON, "foo"),), + **kwargs, + ) + self.assertRaisesRegex( + ValueError, + "Invalid axis specified for face.*", + ugrid.Mesh, + face_coords_and_axes=((self.FACE_LON, "foo"),), + **kwargs, + ) + + # Several arg safety checks in __init__ currently unreachable given earlier checks. + + def test_minimum_connectivities(self): + # Further validations are tested in add_connectivity tests. + kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ( + (self.NODE_LON, "x"), + (self.NODE_LAT, "y"), + ), + "connectivities": (self.FACE_NODE,), + } + self.assertRaisesRegex( + ValueError, + ".*requires a edge_node_connectivity.*", + ugrid.Mesh, + **kwargs, + ) + + def test_minimum_coords(self): + # Further validations are tested in add_coord tests. + kwargs = { + "topology_dimension": 1, + "node_coords_and_axes": ((self.NODE_LON, "x"), (None, "y")), + "connectivities": (self.FACE_NODE,), + } + self.assertRaisesRegex( + ValueError, ".*is a required coordinate.*", ugrid.Mesh, **kwargs + ) diff --git a/lib/iris/tests/unit/experimental/ugrid/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/test_MeshCoord.py new file mode 100644 index 0000000000..4f067e6100 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_MeshCoord.py @@ -0,0 +1,719 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.MeshCoord`. + +""" +import unittest.mock as mock + +import dask.array as da +import numpy as np + +from iris._lazy_data import is_lazy_data +from iris.common.metadata import BaseMetadata +from iris.coords import AuxCoord, Coord +from iris.cube import Cube +from iris.experimental.ugrid import Connectivity, Mesh, MeshCoord + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests +import iris.tests.stock.mesh +from iris.tests.stock.mesh import sample_mesh, sample_meshcoord + + +class Test___init__(tests.IrisTest): + def setUp(self): + mesh = sample_mesh() + self.mesh = mesh + self.meshcoord = sample_meshcoord(mesh=mesh) + + def test_basic(self): + meshcoord = self.meshcoord + self.assertEqual(meshcoord.mesh, self.mesh) + self.assertEqual(meshcoord.location, "face") + self.assertEqual(meshcoord.axis, "x") + self.assertIsInstance(meshcoord, MeshCoord) + self.assertIsInstance(meshcoord, Coord) + + def test_derived_properties(self): + # Check the derived properties of the meshcoord against the correct + # underlying mesh coordinate. + for axis in Mesh.AXES: + meshcoord = sample_meshcoord(axis=axis) + # N.B. + node_x_coord = meshcoord.mesh.coord(include_nodes=True, axis=axis) + for key in node_x_coord.metadata._fields: + meshval = getattr(meshcoord, key) + if key == "var_name": + # var_name is unused. + self.assertIsNone(meshval) + else: + # names, units and attributes are derived from the node coord. + self.assertEqual(meshval, getattr(node_x_coord, key)) + + def test_fail_bad_mesh(self): + with self.assertRaisesRegex(TypeError, "must be a.*Mesh"): + sample_meshcoord(mesh=mock.sentinel.odd) + + def test_valid_locations(self): + for loc in Mesh.LOCATIONS: + meshcoord = sample_meshcoord(location=loc) + self.assertEqual(meshcoord.location, loc) + + def test_fail_bad_location(self): + with self.assertRaisesRegex(ValueError, "not a valid Mesh location"): + sample_meshcoord(location="bad") + + def test_fail_bad_axis(self): + with self.assertRaisesRegex(ValueError, "not a valid Mesh axis"): + sample_meshcoord(axis="q") + + +class Test__readonly_properties(tests.IrisTest): + def setUp(self): + self.meshcoord = sample_meshcoord() + + def test_fixed_metadata(self): + # Check that you cannot set any of these on an existing MeshCoord. 
+ meshcoord = self.meshcoord + for prop in ("mesh", "location", "axis"): + with self.assertRaisesRegex(AttributeError, "can't set"): + setattr(meshcoord, prop, mock.sentinel.odd) + + def test_coord_system(self): + # The property exists, =None, can set to None, can not set otherwise. + self.assertTrue(hasattr(self.meshcoord, "coord_system")) + self.assertIsNone(self.meshcoord.coord_system) + self.meshcoord.coord_system = None + with self.assertRaisesRegex(ValueError, "Cannot set.* MeshCoord"): + self.meshcoord.coord_system = 1 + + def test_set_climatological(self): + # The property exists, =False, can set to False, can not set otherwise. + self.assertTrue(hasattr(self.meshcoord, "climatological")) + self.assertFalse(self.meshcoord.climatological) + self.meshcoord.climatological = False + with self.assertRaisesRegex(ValueError, "Cannot set.* MeshCoord"): + self.meshcoord.climatological = True + + +class Test__inherited_properties(tests.IrisTest): + """ + Check the settability and effect on equality of the common BaseMetadata + properties inherited from Coord : i.e. names/units/attributes. + + Though copied from the mesh at creation, they are also changeable. + + """ + + def setUp(self): + self.meshcoord = sample_meshcoord() + + def test_inherited_properties(self): + # Check that these are settable, and affect equality. + meshcoord = self.meshcoord + # Add an existing attribute, so we can change it. + meshcoord.attributes["thing"] = 7 + for prop in BaseMetadata._fields: + meshcoord2 = meshcoord.copy() + if "name" in prop: + # Use a standard-name, can do for any of them. + setattr(meshcoord2, prop, "height") + elif prop == "units": + meshcoord2.units = "Pa" + elif prop == "attributes": + meshcoord2.attributes["thing"] = 77 + self.assertNotEqual(meshcoord2, meshcoord) + + +class Test__points_and_bounds(tests.IrisTest): + # Basic method testing only, for 3 locations with simple array values. + # See Test_MeshCoord__dataviews for more detailed checks. 
+ def test_node(self): + meshcoord = sample_meshcoord(location="node") + n_nodes = ( + iris.tests.stock.mesh._TEST_N_NODES + ) # n-nodes default for sample mesh + self.assertIsNone(meshcoord.core_bounds()) + self.assertArrayAllClose(meshcoord.points, 1100 + np.arange(n_nodes)) + + def test_edge(self): + meshcoord = sample_meshcoord(location="edge") + points, bounds = meshcoord.core_points(), meshcoord.core_bounds() + self.assertEqual(points.shape, meshcoord.shape) + self.assertEqual(bounds.shape, meshcoord.shape + (2,)) + self.assertArrayAllClose( + meshcoord.points, [2100, 2101, 2102, 2103, 2104] + ) + self.assertArrayAllClose( + meshcoord.bounds, + [ + (1105, 1106), + (1107, 1108), + (1109, 1110), + (1111, 1112), + (1113, 1114), + ], + ) + + def test_face(self): + meshcoord = sample_meshcoord(location="face") + points, bounds = meshcoord.core_points(), meshcoord.core_bounds() + self.assertEqual(points.shape, meshcoord.shape) + self.assertEqual(bounds.shape, meshcoord.shape + (4,)) + self.assertArrayAllClose(meshcoord.points, [3100, 3101, 3102]) + self.assertArrayAllClose( + meshcoord.bounds, + [ + (1100, 1101, 1102, 1103), + (1104, 1105, 1106, 1107), + (1108, 1109, 1110, 1111), + ], + ) + + +class Test___eq__(tests.IrisTest): + def setUp(self): + self.mesh = sample_mesh() + + def _create_common_mesh(self, **kwargs): + return sample_meshcoord(mesh=self.mesh, **kwargs) + + def test_same_mesh(self): + meshcoord1 = self._create_common_mesh() + meshcoord2 = self._create_common_mesh() + self.assertEqual(meshcoord2, meshcoord1) + + def test_different_identical_mesh(self): + # For equality, must have the SAME mesh (at present). + mesh1 = sample_mesh() + mesh2 = sample_mesh() # Presumably identical, but not the same + meshcoord1 = sample_meshcoord(mesh=mesh1) + meshcoord2 = sample_meshcoord(mesh=mesh2) + # These should NOT compare, because the Meshes are not identical : at + # present, Mesh equality is not implemented (i.e. 
limited to identity) + self.assertNotEqual(meshcoord2, meshcoord1) + + def test_different_location(self): + meshcoord = self._create_common_mesh() + meshcoord2 = self._create_common_mesh(location="node") + self.assertNotEqual(meshcoord2, meshcoord) + + def test_different_axis(self): + meshcoord = self._create_common_mesh() + meshcoord2 = self._create_common_mesh(axis="y") + self.assertNotEqual(meshcoord2, meshcoord) + + +class Test__copy(tests.IrisTest): + def test_basic(self): + meshcoord = sample_meshcoord() + meshcoord2 = meshcoord.copy() + self.assertIsNot(meshcoord2, meshcoord) + self.assertEqual(meshcoord2, meshcoord) + # In this case, they should share *NOT* copy the Mesh object. + self.assertIs(meshcoord2.mesh, meshcoord.mesh) + + def test_fail_copy_newpoints(self): + meshcoord = sample_meshcoord() + with self.assertRaisesRegex(ValueError, "Cannot change the content"): + meshcoord.copy(points=meshcoord.points) + + def test_fail_copy_newbounds(self): + meshcoord = sample_meshcoord() + with self.assertRaisesRegex(ValueError, "Cannot change the content"): + meshcoord.copy(bounds=meshcoord.bounds) + + +class Test__getitem__(tests.IrisTest): + def test_slice_wholeslice_1tuple(self): + # The only slicing case that we support, to enable cube slicing. + meshcoord = sample_meshcoord() + meshcoord2 = meshcoord[ + :, + ] + self.assertIsNot(meshcoord2, meshcoord) + self.assertEqual(meshcoord2, meshcoord) + # In this case, we should *NOT* copy the linked Mesh object. + self.assertIs(meshcoord2.mesh, meshcoord.mesh) + + def test_slice_whole_slice_singlekey(self): + # A slice(None) also fails, if not presented in a 1-tuple. 
+ meshcoord = sample_meshcoord() + with self.assertRaisesRegex(ValueError, "Cannot index"): + meshcoord[:] + + def test_fail_slice_part(self): + meshcoord = sample_meshcoord() + with self.assertRaisesRegex(ValueError, "Cannot index"): + meshcoord[:1] + + +class Test__str_repr(tests.IrisTest): + def setUp(self): + mesh = sample_mesh() + self.mesh = mesh + # Give mesh itself a name: makes a difference between str and repr. + self.mesh.rename("test_mesh") + self.meshcoord = sample_meshcoord(mesh=mesh) + + def _expected_elements_regexp( + self, + mesh_strstyle=True, + standard_name=True, + long_name=True, + attributes=True, + ): + regexp = r"^MeshCoord\(mesh=" + if mesh_strstyle: + regexp += r"Mesh\('test_mesh'\)" + else: + regexp += "" + regexp += r", location='face', axis='x', shape=\(3,\)" + if standard_name: + regexp += ", standard_name='longitude'" + regexp += r", units=Unit\('degrees_east'\)" + if long_name: + regexp += ", long_name='long-name'" + if attributes: + regexp += r", attributes={'a': 1, 'b': 'c'}" + regexp += r"\)$" + return regexp + + def test_repr(self): + result = repr(self.meshcoord) + re_expected = self._expected_elements_regexp(mesh_strstyle=False) + self.assertRegex(result, re_expected) + + def test__str__(self): + result = str(self.meshcoord) + re_expected = self._expected_elements_regexp(mesh_strstyle=True) + self.assertRegex(result, re_expected) + + def test_alternative_location_and_axis(self): + meshcoord = sample_meshcoord(mesh=self.mesh, location="edge", axis="y") + result = str(meshcoord) + re_expected = r", location='edge', axis='y'" + self.assertRegex(result, re_expected) + + def test_str_no_long_name(self): + mesh = self.mesh + # Remove the long_name of the node coord in the mesh. + node_coord = mesh.coord(include_nodes=True, axis="x") + node_coord.long_name = None + # Make a new meshcoord, based on the modified mesh. 
+ meshcoord = sample_meshcoord(mesh=self.mesh) + result = str(meshcoord) + re_expected = self._expected_elements_regexp(long_name=False) + self.assertRegex(result, re_expected) + + def test_str_no_standard_name(self): + mesh = self.mesh + # Remove the standard_name of the node coord in the mesh. + node_coord = mesh.coord(include_nodes=True, axis="x") + node_coord.standard_name = None + node_coord.axis = "x" # This is required : but it's a kludge !! + # Make a new meshcoord, based on the modified mesh. + meshcoord = sample_meshcoord(mesh=self.mesh) + result = str(meshcoord) + re_expected = self._expected_elements_regexp(standard_name=False) + self.assertRegex(result, re_expected) + + def test_str_no_attributes(self): + mesh = self.mesh + # No attributes on the node coord in the mesh. + node_coord = mesh.coord(include_nodes=True, axis="x") + node_coord.attributes = None + # Make a new meshcoord, based on the modified mesh. + meshcoord = sample_meshcoord(mesh=self.mesh) + result = str(meshcoord) + re_expected = self._expected_elements_regexp(attributes=False) + self.assertRegex(result, re_expected) + + def test_str_empty_attributes(self): + mesh = self.mesh + # Empty attributes dict on the node coord in the mesh. + node_coord = mesh.coord(include_nodes=True, axis="x") + node_coord.attributes.clear() + # Make a new meshcoord, based on the modified mesh. + meshcoord = sample_meshcoord(mesh=self.mesh) + result = str(meshcoord) + re_expected = self._expected_elements_regexp(attributes=False) + self.assertRegex(result, re_expected) + + +class Test_cube_containment(tests.IrisTest): + # Check that we can put a MeshCoord into a cube, and have it behave just + # like a regular AuxCoord. 
+ def setUp(self): + meshcoord = sample_meshcoord() + data_shape = (2,) + meshcoord.shape + cube = Cube(np.zeros(data_shape)) + cube.add_aux_coord(meshcoord, 1) + self.meshcoord = meshcoord + self.cube = cube + + def test_added_to_cube(self): + meshcoord = self.meshcoord + cube = self.cube + self.assertIn(meshcoord, cube.coords()) + + def test_cube_dims(self): + meshcoord = self.meshcoord + cube = self.cube + self.assertEqual(meshcoord.cube_dims(cube), (1,)) + self.assertEqual(cube.coord_dims(meshcoord), (1,)) + + def test_find_by_name(self): + meshcoord = self.meshcoord + cube = self.cube + self.assertIs(cube.coord(standard_name="longitude"), meshcoord) + self.assertIs(cube.coord(long_name="long-name"), meshcoord) + + def test_find_by_axis(self): + meshcoord = self.meshcoord + cube = self.cube + self.assertIs(cube.coord(axis="x"), meshcoord) + self.assertEqual(cube.coords(axis="y"), []) + + # NOTE: the meshcoord.axis takes precedence over the older + # "guessed axis" approach. So the standard_name does not control it. + meshcoord.rename("latitude") + self.assertIs(cube.coord(axis="x"), meshcoord) + self.assertEqual(cube.coords(axis="y"), []) + + def test_cube_copy(self): + # Check that we can copy a cube, and get a MeshCoord == the original. + # Note: currently must have the *same* mesh, as for MeshCoord.copy(). + meshcoord = self.meshcoord + cube = self.cube + cube2 = cube.copy() + meshco2 = cube2.coord(meshcoord) + self.assertIsNot(meshco2, meshcoord) + self.assertEqual(meshco2, meshcoord) + + def test_cube_nonmesh_slice(self): + # Check that we can slice a cube on a non-mesh dimension, and get a + # meshcoord == original. + # Note: currently this must have the *same* mesh, as for .copy(). 
+ meshcoord = self.meshcoord + cube = self.cube + cube2 = cube[:1] # Make a reduced copy, slicing the non-mesh dim + meshco2 = cube2.coord(meshcoord) + self.assertIsNot(meshco2, meshcoord) + self.assertEqual(meshco2, meshcoord) + + def test_cube_mesh_partslice(self): + # Check that we can *not* get a partial MeshCoord slice, as the + # MeshCoord refuses to be sliced. + # Instead, you get an AuxCoord created from the MeshCoord. + meshcoord = self.meshcoord + cube = self.cube + cube2 = cube[:, :1] # Make a reduced copy, slicing the mesh dim + + # The resulting coord can not be identified with the original. + # (i.e. metadata does not match) + co_matches = cube2.coords(meshcoord) + self.assertEqual(co_matches, []) + + # The resulting coord is an AuxCoord instead of a MeshCoord, but the + # values match. + co2 = cube2.coord(meshcoord.name()) + self.assertFalse(isinstance(co2, MeshCoord)) + self.assertIsInstance(co2, AuxCoord) + self.assertArrayAllClose(co2.points, meshcoord.points[:1]) + self.assertArrayAllClose(co2.bounds, meshcoord.bounds[:1]) + + +class Test_auxcoord_conversion(tests.IrisTest): + def test_basic(self): + meshcoord = sample_meshcoord() + auxcoord = AuxCoord.from_coord(meshcoord) + for propname, auxval in auxcoord.metadata._asdict().items(): + meshval = getattr(meshcoord, propname) + self.assertEqual(auxval, meshval) + # Also check array content. + self.assertArrayAllClose(auxcoord.points, meshcoord.points) + self.assertArrayAllClose(auxcoord.bounds, meshcoord.bounds) + + +class Test_MeshCoord__dataviews(tests.IrisTest): + """ + Fuller testing of points and bounds calculations and behaviour. + Including connectivity missing-points (non-square faces). + + """ + + def setUp(self): + self._make_test_meshcoord() + + def _make_test_meshcoord( + self, + lazy_sources=False, + location="face", + inds_start_index=0, + inds_src_dim=0, + facenodes_changes=None, + ): + # Construct a miniature face-nodes mesh for testing. 
+ # NOTE: we will make our connectivity arrays with standard + # start_index=0 and src_dim=0 : We only adjust that (if required) when + # creating the actual connectivities. + face_nodes_array = np.array( + [ + [0, 2, 1, 3], + [1, 3, 10, 13], + [2, 7, 9, 19], + [ + 3, + 4, + 7, + -1, + ], # This one has a "missing" point (it's a triangle) + [8, 1, 7, 2], + ] + ) + # Connectivity uses *masked* for missing points. + face_nodes_array = np.ma.masked_less(face_nodes_array, 0) + if facenodes_changes: + facenodes_changes = facenodes_changes.copy() + facenodes_changes.pop("n_extra_bad_points") + for indices, value in facenodes_changes.items(): + face_nodes_array[indices] = value + + # Construct a miniature edge-nodes mesh for testing. + edge_nodes_array = np.array([[0, 2], [1, 3], [1, 4], [3, 7]]) + # Connectivity uses *masked* for missing points. + edge_nodes_array = np.ma.masked_less(edge_nodes_array, 0) + + n_faces = face_nodes_array.shape[0] + n_edges = edge_nodes_array.shape[0] + n_nodes = int(face_nodes_array.max() + 1) + self.NODECOORDS_BASENUM = 1100.0 + self.EDGECOORDS_BASENUM = 1200.0 + self.FACECOORDS_BASENUM = 1300.0 + node_xs = self.NODECOORDS_BASENUM + np.arange(n_nodes) + edge_xs = self.EDGECOORDS_BASENUM + np.arange(n_edges) + face_xs = self.FACECOORDS_BASENUM + np.arange(n_faces) + + # Record all these for re-use in tests + self.n_faces = n_faces + self.n_nodes = n_nodes + self.face_xs = face_xs + self.node_xs = node_xs + self.edge_xs = edge_xs + self.face_nodes_array = face_nodes_array + self.edge_nodes_array = edge_nodes_array + + # convert source data to Dask arrays if asked. + if lazy_sources: + + def lazify(arr): + return da.from_array(arr, chunks=-1, meta=np.ndarray) + + node_xs = lazify(node_xs) + face_xs = lazify(face_xs) + edge_xs = lazify(edge_xs) + face_nodes_array = lazify(face_nodes_array) + edge_nodes_array = lazify(edge_nodes_array) + + # Build a mesh with this info stored in it. 
+ co_nodex = AuxCoord( + node_xs, standard_name="longitude", long_name="node_x", units=1 + ) + co_facex = AuxCoord( + face_xs, standard_name="longitude", long_name="face_x", units=1 + ) + co_edgex = AuxCoord( + edge_xs, standard_name="longitude", long_name="edge_x", units=1 + ) + # N.B. the Mesh requires 'Y's as well. + co_nodey = co_nodex.copy() + co_nodey.rename("latitude") + co_nodey.long_name = "node_y" + co_facey = co_facex.copy() + co_facey.rename("latitude") + co_facey.long_name = "face_y" + co_edgey = co_edgex.copy() + co_edgey.rename("edge_y") + co_edgey.long_name = "edge_y" + + face_node_conn = Connectivity( + inds_start_index + + ( + face_nodes_array.transpose() + if inds_src_dim == 1 + else face_nodes_array + ), + cf_role="face_node_connectivity", + long_name="face_nodes", + start_index=inds_start_index, + src_dim=inds_src_dim, + ) + + edge_node_conn = Connectivity( + inds_start_index + + ( + edge_nodes_array.transpose() + if inds_src_dim == 1 + else edge_nodes_array + ), + cf_role="edge_node_connectivity", + long_name="edge_nodes", + start_index=inds_start_index, + src_dim=inds_src_dim, + ) + + self.mesh = Mesh( + topology_dimension=2, + node_coords_and_axes=[(co_nodex, "x"), (co_nodey, "y")], + connectivities=[face_node_conn, edge_node_conn], + face_coords_and_axes=[(co_facex, "x"), (co_facey, "y")], + edge_coords_and_axes=[(co_edgex, "x"), (co_edgey, "y")], + ) + + # Construct a test meshcoord. + meshcoord = MeshCoord(mesh=self.mesh, location=location, axis="x") + self.meshcoord = meshcoord + return meshcoord + + def _check_expected_points_values(self): + # The points are just the face_x-s + meshcoord = self.meshcoord + self.assertArrayAllClose(meshcoord.points, self.face_xs) + + def _check_expected_bounds_values(self, facenodes_changes=None): + mesh_coord = self.meshcoord + # The bounds are selected node_x-s, ==> node_number + coords-offset + result = mesh_coord.bounds + # N.B. result should be masked where the masked indices are. 
+ expected = self.NODECOORDS_BASENUM + self.face_nodes_array + if facenodes_changes: + # ALSO include any "bad" values in that calculation. + bad_values = (self.face_nodes_array < 0) | ( + self.face_nodes_array >= self.n_nodes + ) + expected[bad_values] = np.ma.masked + # Check there are *some* masked points. + n_missing_expected = 1 + if facenodes_changes: + n_missing_expected += facenodes_changes["n_extra_bad_points"] + self.assertEqual(np.count_nonzero(expected.mask), n_missing_expected) + # Check results match, *including* location of masked points. + self.assertMaskedArrayAlmostEqual(result, expected) + + def test_points_values(self): + """Basic points content check, on real data.""" + meshcoord = self.meshcoord + self.assertFalse(meshcoord.has_lazy_points()) + self.assertFalse(meshcoord.has_lazy_bounds()) + self._check_expected_points_values() + + def test_bounds_values(self): + """Basic bounds contents check.""" + meshcoord = self.meshcoord + self.assertFalse(meshcoord.has_lazy_points()) + self.assertFalse(meshcoord.has_lazy_bounds()) + self._check_expected_bounds_values() + + def test_lazy_points_values(self): + """Check lazy points calculation on lazy inputs.""" + # Remake the test data with lazy source coords. + meshcoord = self._make_test_meshcoord(lazy_sources=True) + self.assertTrue(meshcoord.has_lazy_points()) + self.assertTrue(meshcoord.has_lazy_bounds()) + # Check values, as previous. + self._check_expected_points_values() + + def test_lazy_bounds_values(self): + meshcoord = self._make_test_meshcoord(lazy_sources=True) + self.assertTrue(meshcoord.has_lazy_points()) + self.assertTrue(meshcoord.has_lazy_bounds()) + # Check values, as previous. 
+ self._check_expected_bounds_values() + + def test_edge_points(self): + meshcoord = self._make_test_meshcoord(location="edge") + result = meshcoord.points + self.assertArrayAllClose(result, self.edge_xs) + + def test_edge_bounds(self): + meshcoord = self._make_test_meshcoord(location="edge") + result = meshcoord.bounds + # The bounds are selected node_x-s : all == node_number + 100.0 + expected = self.NODECOORDS_BASENUM + self.edge_nodes_array + # NB simpler than faces : no possibility of missing points + self.assertArrayAlmostEqual(result, expected) + + def test_bounds_connectivity__src_dim_1(self): + # Test with a transposed indices array. + self._make_test_meshcoord(inds_src_dim=1) + self._check_expected_bounds_values() + + def test_bounds_connectivity__start_index_1(self): + # Test 1-based indices. + self._make_test_meshcoord(inds_start_index=1) + self._check_expected_bounds_values() + + def test_meshcoord_leaves_originals_lazy(self): + self._make_test_meshcoord(lazy_sources=True) + mesh = self.mesh + meshcoord = self.meshcoord + + # Fetch the relevant source objects from the mesh. + def fetch_sources_from_mesh(): + return ( + mesh.coord(include_nodes=True, axis="x"), + mesh.coord(include_faces=True, axis="x"), + mesh.face_node_connectivity, + ) + + # Check all the source coords are lazy. + for coord in fetch_sources_from_mesh(): + # Note: not all are actual Coords, so can't use 'has_lazy_points'. + self.assertTrue(is_lazy_data(coord._core_values())) + + # Calculate both points + bounds of the meshcoord + self.assertTrue(meshcoord.has_lazy_points()) + self.assertTrue(meshcoord.has_lazy_bounds()) + meshcoord.points + meshcoord.bounds + self.assertFalse(meshcoord.has_lazy_points()) + self.assertFalse(meshcoord.has_lazy_bounds()) + + # Check all the source coords are still lazy. + for coord in fetch_sources_from_mesh(): + # Note: not all are actual Coords, so can't use 'has_lazy_points'. 
+ self.assertTrue(is_lazy_data(coord._core_values())) + + def _check_bounds_bad_index_values(self, lazy): + facenodes_modify = { + # nothing wrong with this one + (2, 1): 1, + # extra missing point, normal "missing" indicator + (3, 3): np.ma.masked, + # bad index > n_nodes + (4, 2): 100, + # NOTE: **can't** set an index < 0, as it is rejected by the + # Connectivity validity check. + # Indicate how many "extra" missing results this should cause. + "n_extra_bad_points": 2, + } + self._make_test_meshcoord( + facenodes_changes=facenodes_modify, lazy_sources=lazy + ) + self._check_expected_bounds_values() + + def test_bounds_badvalues__real(self): + self._check_bounds_bad_index_values(lazy=False) + + def test_bounds_badvalues__lazy(self): + self._check_bounds_bad_index_values(lazy=True) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/test_MeshCoordMetadata.py new file mode 100644 index 0000000000..a7d24fa794 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_MeshCoordMetadata.py @@ -0,0 +1,733 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.MeshCoordMetadata`. + +""" + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata +from iris.experimental.ugrid import MeshCoordMetadata + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + + +class Test__identity(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.location = mock.sentinel.location + self.axis = mock.sentinel.axis + self.cls = MeshCoordMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + location=self.location, + axis=self.axis, + ) + fmt = ( + "MeshCoordMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r}, " + "location={!r}, axis={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.location, + self.axis, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + "attributes", + "location", + "axis", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test__eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + location=sentinel.location, + axis=sentinel.axis, + ) + self.dummy = sentinel.dummy + self.cls = MeshCoordMetadata + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + 
return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none_nonmember(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + 
"iris.common.metadata._LENIENT", return_value=True + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = MeshCoordMetadata + values = [1] * 
len(self.cls._fields) + self.one = self.cls(*values) + + values_two = values[:] + values_two[2] = 2 + self.two = self.cls(*values_two) + + values_none = values[:] + values_none[2] = None + self.none = self.cls(*values_none) + + values_attrs = values[:] + values_attrs[4] = 10 + self.attributes = self.cls(*values_attrs) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.cls = MeshCoordMetadata + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + location=sentinel.location, + axis=sentinel.axis, + ) + self.dummy = sentinel.dummy + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__ + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, 
arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with 
mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() 
+ ) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.cls = MeshCoordMetadata + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + location=sentinel.location, + axis=sentinel.axis, + ) + self.dummy = sentinel.dummy + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__ + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, 
result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + member_value = getattr(lmetadata, member) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (member_value, None) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = (None, member_value) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = 
self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", 
return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + 
lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = MeshCoordMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py new file mode 100644 index 0000000000..fe87ffab59 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_MeshMetadata.py @@ -0,0 +1,784 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.MeshMetadata`. + +""" + +from copy import deepcopy +import unittest.mock as mock +from unittest.mock import sentinel + +from iris.common.lenient import _LENIENT, _qualname +from iris.common.metadata import BaseMetadata +from iris.experimental.ugrid import MeshMetadata + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + + +class Test(tests.IrisTest): + def setUp(self): + self.standard_name = mock.sentinel.standard_name + self.long_name = mock.sentinel.long_name + self.var_name = mock.sentinel.var_name + self.units = mock.sentinel.units + self.attributes = mock.sentinel.attributes + self.topology_dimension = mock.sentinel.topology_dimension + self.node_dimension = mock.sentinel.node_dimension + self.edge_dimension = mock.sentinel.edge_dimension + self.face_dimension = mock.sentinel.face_dimension + self.cls = MeshMetadata + + def test_repr(self): + metadata = self.cls( + standard_name=self.standard_name, + long_name=self.long_name, + var_name=self.var_name, + units=self.units, + attributes=self.attributes, + topology_dimension=self.topology_dimension, + node_dimension=self.node_dimension, + edge_dimension=self.edge_dimension, + face_dimension=self.face_dimension, + ) + fmt = ( + "MeshMetadata(standard_name={!r}, long_name={!r}, " + "var_name={!r}, units={!r}, attributes={!r}, " + "topology_dimension={!r}, node_dimension={!r}, " + "edge_dimension={!r}, face_dimension={!r})" + ) + expected = fmt.format( + self.standard_name, + self.long_name, + self.var_name, + self.units, + self.attributes, + self.topology_dimension, + self.node_dimension, + self.edge_dimension, + self.face_dimension, + ) + self.assertEqual(expected, repr(metadata)) + + def test__fields(self): + expected = ( + "standard_name", + "long_name", + "var_name", + "units", + 
"attributes", + "topology_dimension", + "node_dimension", + "edge_dimension", + "face_dimension", + ) + self.assertEqual(self.cls._fields, expected) + + def test_bases(self): + self.assertTrue(issubclass(self.cls, BaseMetadata)) + + +class Test__eq__(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + topology_dimension=sentinel.topology_dimension, + node_dimension=sentinel.node_dimension, + edge_dimension=sentinel.edge_dimension, + face_dimension=sentinel.face_dimension, + ) + self.dummy = sentinel.dummy + self.cls = MeshMetadata + # The "node_dimension", "edge_dimension" and "face_dimension" members + # are stateful only; they do not participate in lenient/strict equivalence. + self.members_dim_names = filter( + lambda member: member + in ("node_dimension", "edge_dimension", "face_dimension"), + self.cls._members, + ) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) + + def test_lenient_service(self): + qualname___eq__ = _qualname(self.cls.__eq__) + self.assertIn(qualname___eq__, _LENIENT) + self.assertTrue(_LENIENT[qualname___eq__]) + self.assertTrue(_LENIENT[self.cls.__eq__]) + + def test_call(self): + other = sentinel.other + return_value = sentinel.return_value + metadata = self.cls(*(None,) * len(self.cls._fields)) + with mock.patch.object( + BaseMetadata, "__eq__", return_value=return_value + ) as mocker: + result = metadata.__eq__(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + 
self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_topology_dim_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_same_dim_names_none(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_topology_dim(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_lenient_different_dim_names(self): + for member in self.members_dim_names: + lmetadata = 
self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_topology_dim(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_dim_names(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + 
self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_topology_dim_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["topology_dimension"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertFalse(lmetadata.__eq__(rmetadata)) + self.assertFalse(rmetadata.__eq__(lmetadata)) + + def test_op_strict_different_dim_names_none(self): + for member in self.members_dim_names: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertTrue(lmetadata.__eq__(rmetadata)) + self.assertTrue(rmetadata.__eq__(lmetadata)) + + +class Test___lt__(tests.IrisTest): + def setUp(self): + self.cls = MeshMetadata + self.one = self.cls(1, 1, 1, 1, 1, 1, 1, 1, 1) + self.two = self.cls(1, 1, 1, 2, 1, 1, 1, 1, 1) + self.none = self.cls(1, 1, 1, None, 1, 1, 1, 1, 1) + self.attributes = self.cls(1, 1, 1, 1, 10, 1, 1, 1, 1) + + def test__ascending_lt(self): + result = self.one < self.two + self.assertTrue(result) + + def test__descending_lt(self): + result = self.two < self.one + self.assertFalse(result) + + def test__none_rhs_operand(self): + result = self.one < self.none + self.assertFalse(result) + + def test__none_lhs_operand(self): + result = self.none < self.one + self.assertTrue(result) + + def test__ignore_attributes(self): + result = self.one < self.attributes + self.assertFalse(result) + result = self.attributes < self.one + self.assertFalse(result) + + +class Test_combine(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + topology_dimension=sentinel.topology_dimension, + node_dimension=sentinel.node_dimension, + 
edge_dimension=sentinel.edge_dimension, + face_dimension=sentinel.face_dimension, + ) + self.dummy = sentinel.dummy + self.cls = MeshMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.combine.__doc__, self.cls.combine.__doc__ + ) + + def test_lenient_service(self): + qualname_combine = _qualname(self.cls.combine) + self.assertIn(qualname_combine, _LENIENT) + self.assertTrue(_LENIENT[qualname_combine]) + self.assertTrue(_LENIENT[self.cls.combine]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "combine", return_value=return_value + ) as mocker: + result = self.none.combine(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + expected = self.values + + with 
mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = right.copy() + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["units"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + expected = self.values.copy() + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different(self): + 
lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["long_name"] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected["long_name"] = None + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) + self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + expected = self.values.copy() + expected[member] = None + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + expected, lmetadata.combine(rmetadata)._asdict() + ) + self.assertEqual( + expected, rmetadata.combine(lmetadata)._asdict() + ) + + +class Test_difference(tests.IrisTest): + def setUp(self): + self.values = dict( + standard_name=sentinel.standard_name, + 
long_name=sentinel.long_name, + var_name=sentinel.var_name, + units=sentinel.units, + attributes=sentinel.attributes, + topology_dimension=sentinel.topology_dimension, + node_dimension=sentinel.node_dimension, + edge_dimension=sentinel.edge_dimension, + face_dimension=sentinel.face_dimension, + ) + self.dummy = sentinel.dummy + self.cls = MeshMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual( + BaseMetadata.difference.__doc__, self.cls.difference.__doc__ + ) + + def test_lenient_service(self): + qualname_difference = _qualname(self.cls.difference) + self.assertIn(qualname_difference, _LENIENT) + self.assertTrue(_LENIENT[qualname_difference]) + self.assertTrue(_LENIENT[self.cls.difference]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "difference", return_value=return_value + ) as mocker: + result = self.none.difference(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + def test_op_lenient_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def 
test_op_lenient_same_none(self): + lmetadata = self.cls(**self.values) + right = self.values.copy() + right["var_name"] = None + rmetadata = self.cls(**right) + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_lenient_same_members_none(self): + for member in self.cls._members: + lmetadata = self.cls(**self.values) + member_value = getattr(lmetadata, member) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (member_value, None) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = (None, member_value) + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["units"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["units"] = (left["units"], right["units"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["units"] = lexpected["units"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=True): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_lenient_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + 
rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=True + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_same(self): + lmetadata = self.cls(**self.values) + rmetadata = self.cls(**self.values) + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertIsNone(lmetadata.difference(rmetadata)) + self.assertIsNone(rmetadata.difference(lmetadata)) + + def test_op_strict_different(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right["long_name"] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = self.dummy + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_none(self): + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + 
right["long_name"] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected["long_name"] = (left["long_name"], right["long_name"]) + rexpected = deepcopy(self.none)._asdict() + rexpected["long_name"] = lexpected["long_name"][::-1] + + with mock.patch("iris.common.metadata._LENIENT", return_value=False): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + def test_op_strict_different_members_none(self): + for member in self.cls._members: + left = self.values.copy() + lmetadata = self.cls(**left) + right = self.values.copy() + right[member] = None + rmetadata = self.cls(**right) + lexpected = deepcopy(self.none)._asdict() + lexpected[member] = (left[member], right[member]) + rexpected = deepcopy(self.none)._asdict() + rexpected[member] = lexpected[member][::-1] + + with mock.patch( + "iris.common.metadata._LENIENT", return_value=False + ): + self.assertEqual( + lexpected, lmetadata.difference(rmetadata)._asdict() + ) + self.assertEqual( + rexpected, rmetadata.difference(lmetadata)._asdict() + ) + + +class Test_equal(tests.IrisTest): + def setUp(self): + self.cls = MeshMetadata + self.none = self.cls(*(None,) * len(self.cls._fields)) + + def test_wraps_docstring(self): + self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) + + def test_lenient_service(self): + qualname_equal = _qualname(self.cls.equal) + self.assertIn(qualname_equal, _LENIENT) + self.assertTrue(_LENIENT[qualname_equal]) + self.assertTrue(_LENIENT[self.cls.equal]) + + def test_lenient_default(self): + other = sentinel.other + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + 
self.assertEqual(dict(lenient=None), kwargs) + + def test_lenient(self): + other = sentinel.other + lenient = sentinel.lenient + return_value = sentinel.return_value + with mock.patch.object( + BaseMetadata, "equal", return_value=return_value + ) as mocker: + result = self.none.equal(other, lenient=lenient) + + self.assertEqual(return_value, result) + self.assertEqual(1, mocker.call_count) + (arg,), kwargs = mocker.call_args + self.assertEqual(other, arg) + self.assertEqual(dict(lenient=lenient), kwargs) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/test_ParseUgridOnLoad.py b/lib/iris/tests/unit/experimental/ugrid/test_ParseUgridOnLoad.py new file mode 100644 index 0000000000..3efb2d8d67 --- /dev/null +++ b/lib/iris/tests/unit/experimental/ugrid/test_ParseUgridOnLoad.py @@ -0,0 +1,46 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :class:`iris.experimental.ugrid.ParseUGridOnLoad` class. + +todo: remove this module when experimental.ugrid is folded into standard behaviour. + +""" +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD, ParseUGridOnLoad + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + + +class TestClass(tests.IrisTest): + @classmethod + def setUpClass(cls): + cls.cls = ParseUGridOnLoad() + + def test_default(self): + self.assertFalse(self.cls) + + def test_context(self): + self.assertFalse(self.cls) + with self.cls.context(): + self.assertTrue(self.cls) + self.assertFalse(self.cls) + + +class TestConstant(tests.IrisTest): + @classmethod + def setUpClass(cls): + cls.constant = PARSE_UGRID_ON_LOAD + + def test_default(self): + self.assertFalse(self.constant) + + def test_context(self): + self.assertFalse(self.constant) + with self.constant.context(): + self.assertTrue(self.constant) + self.assertFalse(self.constant) diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py new file mode 100644 index 0000000000..bfc2d586ef --- /dev/null +++ b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py @@ -0,0 +1,51 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :class:`iris.fileformats.cf.CFGroup` class.""" + +from unittest.mock import MagicMock + +from iris.fileformats.cf import ( + CFAuxiliaryCoordinateVariable, + CFCoordinateVariable, + CFDataVariable, + CFGroup, +) + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + + +class Tests(tests.IrisTest): + # TODO: unit tests for existing functionality pre 2021-03-11. 
+ def setUp(self): + self.cf_group = CFGroup() + + def test_non_data_names(self): + data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") + aux_var = MagicMock( + spec=CFAuxiliaryCoordinateVariable, cf_name="aux_var" + ) + coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") + coord_var2 = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var2") + duplicate_name_var = MagicMock( + spec=CFCoordinateVariable, cf_name="aux_var" + ) + + for var in ( + data_var, + aux_var, + coord_var, + coord_var2, + duplicate_name_var, + ): + self.cf_group[var.cf_name] = var + + expected_names = [ + var.cf_name for var in (aux_var, coord_var, coord_var2) + ] + expected = set(expected_names) + self.assertEqual(expected, self.cf_group.non_data_variable_names) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py new file mode 100644 index 0000000000..c4c868cd59 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py @@ -0,0 +1,320 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :func:`iris.fileformats.netcdf.load_cubes` function. + +todo: migrate the remaining unit-esque tests from iris.tests.test_netcdf, + switching to use netcdf.load_cubes() instead of iris.load()/load_cube(). + +""" + +from pathlib import Path +from shutil import rmtree +from subprocess import check_call +import tempfile + +from cf_units import as_unit +import numpy as np + +from iris.coords import AncillaryVariable, CellMeasure +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD, MeshCoord +from iris.fileformats.netcdf import load_cubes, logger + +# Import iris.tests first so that some things can be initialised before +# importing anything else. 
+import iris.tests as tests + + +def setUpModule(): + global TMP_DIR + TMP_DIR = Path(tempfile.mkdtemp()) + + +def tearDownModule(): + if TMP_DIR is not None: + rmtree(TMP_DIR) + + +def cdl_to_nc(cdl): + cdl_path = TMP_DIR / "tst.cdl" + nc_path = TMP_DIR / "tst.nc" + # Write CDL string into a temporary CDL file. + with open(cdl_path, "w") as f_out: + f_out.write(cdl) + # Use ncgen to convert this into an actual (temporary) netCDF file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + check_call(command, shell=True) + return str(nc_path) + + +class Tests(tests.IrisTest): + def test_ancillary_variables(self): + # Note: using a CDL string as a test data reference, rather than a + # binary file. + ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + variables: + int64 qqv(axv) ; + qqv:long_name = "qq" ; + qqv:units = "1" ; + qqv:ancillary_variables = "my_av" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + double my_av(axv) ; + my_av:units = "1" ; + my_av:long_name = "refs" ; + my_av:custom = "extra-attribute"; + data: + axv = 1, 2, 3; + my_av = 11., 12., 13.; + } + """ + nc_path = cdl_to_nc(ref_cdl) + + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. + cubes = list(load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + avs = cubes[0].ancillary_variables() + self.assertEqual(len(avs), 1) + expected = AncillaryVariable( + np.ma.array([11.0, 12.0, 13.0]), + long_name="refs", + var_name="my_av", + units="1", + attributes={"custom": "extra-attribute"}, + ) + self.assertEqual(avs[0], expected) + + def test_status_flags(self): + # Note: using a CDL string as a test data reference, rather than a binary file. 
+ ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + variables: + int64 qqv(axv) ; + qqv:long_name = "qq" ; + qqv:units = "1" ; + qqv:ancillary_variables = "my_av" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + byte my_av(axv) ; + my_av:long_name = "qq status_flag" ; + my_av:flag_values = 1b, 2b ; + my_av:flag_meanings = "a b" ; + data: + axv = 11, 21, 31; + my_av = 1b, 1b, 2b; + } + """ + nc_path = cdl_to_nc(ref_cdl) + + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. + cubes = list(load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + avs = cubes[0].ancillary_variables() + self.assertEqual(len(avs), 1) + expected = AncillaryVariable( + np.ma.array([1, 1, 2], dtype=np.int8), + long_name="qq status_flag", + var_name="my_av", + units="no_unit", + attributes={ + "flag_values": np.array([1, 2], dtype=np.int8), + "flag_meanings": "a b", + }, + ) + self.assertEqual(avs[0], expected) + + def test_cell_measures(self): + # Note: using a CDL string as a test data reference, rather than a binary file. + ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + ayv = 2 ; + variables: + int64 qqv(ayv, axv) ; + qqv:long_name = "qq" ; + qqv:units = "1" ; + qqv:cell_measures = "area: my_areas" ; + int64 ayv(ayv) ; + ayv:units = "1" ; + ayv:long_name = "y" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + double my_areas(ayv, axv) ; + my_areas:units = "m2" ; + my_areas:long_name = "standardised cell areas" ; + my_areas:custom = "extra-attribute"; + data: + axv = 11, 12, 13; + ayv = 21, 22; + my_areas = 110., 120., 130., 221., 231., 241.; + } + """ + nc_path = cdl_to_nc(ref_cdl) + + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
+ cubes = list(load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + cms = cubes[0].cell_measures() + self.assertEqual(len(cms), 1) + expected = CellMeasure( + np.ma.array([[110.0, 120.0, 130.0], [221.0, 231.0, 241.0]]), + measure="area", + var_name="my_areas", + long_name="standardised cell areas", + units="m2", + attributes={"custom": "extra-attribute"}, + ) + self.assertEqual(cms[0], expected) + + def test_default_units(self): + # Note: using a CDL string as a test data reference, rather than a binary file. + ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + ayv = 2 ; + variables: + int64 qqv(ayv, axv) ; + qqv:long_name = "qq" ; + qqv:ancillary_variables = "my_av" ; + qqv:cell_measures = "area: my_areas" ; + int64 ayv(ayv) ; + ayv:long_name = "y" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + double my_av(axv) ; + my_av:long_name = "refs" ; + double my_areas(ayv, axv) ; + my_areas:long_name = "areas" ; + data: + axv = 11, 12, 13; + ayv = 21, 22; + my_areas = 110., 120., 130., 221., 231., 241.; + } + """ + nc_path = cdl_to_nc(ref_cdl) + + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
+ cubes = list(load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + self.assertEqual(cubes[0].units, as_unit("unknown")) + self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) + self.assertEqual(cubes[0].coord("x").units, as_unit(1)) + self.assertEqual( + cubes[0].ancillary_variable("refs").units, as_unit("unknown") + ) + self.assertEqual( + cubes[0].cell_measure("areas").units, as_unit("unknown") + ) + + +class TestsMesh(tests.IrisTest): + @classmethod + def setUpClass(cls): + cls.ref_cdl = """ + netcdf mesh_test { + dimensions: + node = 3 ; + face = 1 ; + vertex = 3 ; + levels = 2 ; + variables: + int mesh ; + mesh:cf_role = "mesh_topology" ; + mesh:topology_dimension = 2 ; + mesh:node_coordinates = "node_x node_y" ; + mesh:face_coordinates = "face_x face_y" ; + mesh:face_node_connectivity = "face_nodes" ; + float node_x(node) ; + node_x:standard_name = "longitude" ; + float node_y(node) ; + node_y:standard_name = "latitude" ; + float face_x(face) ; + face_x:standard_name = "longitude" ; + float face_y(face) ; + face_y:standard_name = "latitude" ; + int face_nodes(face, vertex) ; + face_nodes:cf_role = "face_node_connectivity" ; + face_nodes:start_index = 0 ; + int levels(levels) ; + float node_data(levels, node) ; + node_data:coordinates = "node_x node_y" ; + node_data:location = "node" ; + node_data:mesh = "mesh" ; + float face_data(levels, face) ; + face_data:coordinates = "face_x face_y" ; + face_data:location = "face" ; + face_data:mesh = "mesh" ; + data: + mesh = 0; + node_x = 0., 2., 1.; + node_y = 0., 0., 1.; + face_x = 0.5; + face_y = 0.5; + face_nodes = 0, 1, 2; + levels = 1, 2; + node_data = 0., 0., 0.; + face_data = 0.; + } + """ + cls.nc_path = cdl_to_nc(cls.ref_cdl) + with PARSE_UGRID_ON_LOAD.context(): + cls.mesh_cubes = list(load_cubes(cls.nc_path)) + + def test_mesh_handled(self): + cubes_no_ugrid = list(load_cubes(self.nc_path)) + self.assertEqual(4, len(cubes_no_ugrid)) + self.assertEqual(2, len(self.mesh_cubes)) + + def 
test_standard_dims(self): + for cube in self.mesh_cubes: + self.assertIsNotNone(cube.coords("levels")) + + def test_mesh_coord(self): + cube = [ + cube for cube in self.mesh_cubes if cube.var_name == "face_data" + ][0] + face_x = cube.coord("longitude") + face_y = cube.coord("latitude") + + for coord in (face_x, face_y): + self.assertIsInstance(coord, MeshCoord) + self.assertEqual("face", coord.location) + self.assertArrayEqual(np.ma.array([0.5]), coord.points) + + self.assertEqual("x", face_x.axis) + self.assertEqual("y", face_y.axis) + self.assertEqual(face_x.mesh, face_y.mesh) + self.assertArrayEqual(np.ma.array([[0.0, 2.0, 1.0]]), face_x.bounds) + self.assertArrayEqual(np.ma.array([[0.0, 0.0, 1.0]]), face_y.bounds) + + def test_shared_mesh(self): + cube_meshes = [cube.coord("latitude").mesh for cube in self.mesh_cubes] + self.assertEqual(cube_meshes[0], cube_meshes[1]) + + def test_missing_mesh(self): + ref_cdl = self.ref_cdl.replace( + 'face_data:mesh = "mesh"', 'face_data:mesh = "mesh2"' + ) + nc_path = cdl_to_nc(ref_cdl) + + # No error when mesh handling not activated. + _ = list(load_cubes(nc_path)) + + with PARSE_UGRID_ON_LOAD.context(): + log_regex = r"File does not contain mesh.*" + with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): + _ = list(load_cubes(nc_path)) diff --git a/lib/iris/tests/experimental/ugrid/__init__.py b/lib/iris/tests/unit/tests/stock/__init__.py similarity index 77% rename from lib/iris/tests/experimental/ugrid/__init__.py rename to lib/iris/tests/unit/tests/stock/__init__.py index 765c1d09a2..f91390c2b3 100644 --- a/lib/iris/tests/experimental/ugrid/__init__.py +++ b/lib/iris/tests/unit/tests/stock/__init__.py @@ -3,7 +3,4 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -""" -Ugrid code is tested in this package. 
- -""" +"""Unit tests for the :mod:`iris.tests.stock` module.""" diff --git a/lib/iris/tests/unit/tests/stock/test_netcdf.py b/lib/iris/tests/unit/tests/stock/test_netcdf.py new file mode 100644 index 0000000000..67b23f5aac --- /dev/null +++ b/lib/iris/tests/unit/tests/stock/test_netcdf.py @@ -0,0 +1,141 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the :mod:`iris.tests.stock.netcdf` module.""" + +import shutil +import tempfile + +from iris import load_cube +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD, Mesh, MeshCoord + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests +from iris.tests.stock import netcdf + + +class XIOSFileMixin(tests.IrisTest): + @classmethod + def setUpClass(cls): + # Create a temp directory for transient test files. + cls.temp_dir = tempfile.mkdtemp() + + @classmethod + def tearDownClass(cls): + # Destroy the temp directory. + shutil.rmtree(cls.temp_dir) + + def create_synthetic_file(self, **create_kwargs): + # Should be overridden to invoke one of the create_file_ functions. + # E.g. + # return netcdf.create_file__xios_2d_face_half_levels( + # temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs + # ) + raise NotImplementedError + + def create_synthetic_test_cube(self, **create_kwargs): + file_path = self.create_synthetic_file(**create_kwargs) + with PARSE_UGRID_ON_LOAD.context(): + cube = load_cube(file_path) + return cube + + def check_cube(self, cube, shape, location, level): + # Basic checks on the primary data cube. + self.assertEqual(cube.var_name, "thing") + self.assertEqual(cube.long_name, "thingness") + self.assertEqual(cube.shape, shape) + + # Also a few checks on the attached mesh-related information. 
+ last_dim = cube.ndim - 1 + self.assertIsInstance(cube.mesh, Mesh) + self.assertEqual(cube.mesh_dim(), last_dim) + self.assertEqual(cube.location, location) + for coord_name in ("longitude", "latitude"): + coord = cube.coord(coord_name) + self.assertIsInstance(coord, MeshCoord) + self.assertEqual(coord.shape, (shape[last_dim],)) + self.assertTrue(cube.mesh.var_name.endswith(f"{level}_levels")) + + +class Test_create_file__xios_2d_face_half_levels(XIOSFileMixin): + def create_synthetic_file(self, **create_kwargs): + return netcdf.create_file__xios_2d_face_half_levels( + temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs + ) + + def test_basic_load(self): + cube = self.create_synthetic_test_cube() + self.check_cube(cube, shape=(1, 866), location="face", level="half") + + def test_scale_mesh(self): + cube = self.create_synthetic_test_cube(n_faces=10) + self.check_cube(cube, shape=(1, 10), location="face", level="half") + + def test_scale_time(self): + cube = self.create_synthetic_test_cube(n_times=3) + self.check_cube(cube, shape=(3, 866), location="face", level="half") + + +class Test_create_file__xios_3d_face_half_levels(XIOSFileMixin): + def create_synthetic_file(self, **create_kwargs): + return netcdf.create_file__xios_3d_face_half_levels( + temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs + ) + + def test_basic_load(self): + cube = self.create_synthetic_test_cube() + self.check_cube( + cube, shape=(1, 38, 866), location="face", level="half" + ) + + def test_scale_mesh(self): + cube = self.create_synthetic_test_cube(n_faces=10) + self.check_cube(cube, shape=(1, 38, 10), location="face", level="half") + + def test_scale_time(self): + cube = self.create_synthetic_test_cube(n_times=3) + self.check_cube( + cube, shape=(3, 38, 866), location="face", level="half" + ) + + def test_scale_levels(self): + cube = self.create_synthetic_test_cube(n_levels=10) + self.check_cube( + cube, shape=(1, 10, 866), location="face", level="half" + ) + + 
+class Test_create_file__xios_3d_face_full_levels(XIOSFileMixin): + def create_synthetic_file(self, **create_kwargs): + return netcdf.create_file__xios_3d_face_full_levels( + temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs + ) + + def test_basic_load(self): + cube = self.create_synthetic_test_cube() + self.check_cube( + cube, shape=(1, 39, 866), location="face", level="full" + ) + + def test_scale_mesh(self): + cube = self.create_synthetic_test_cube(n_faces=10) + self.check_cube(cube, shape=(1, 39, 10), location="face", level="full") + + def test_scale_time(self): + cube = self.create_synthetic_test_cube(n_times=3) + self.check_cube( + cube, shape=(3, 39, 866), location="face", level="full" + ) + + def test_scale_levels(self): + cube = self.create_synthetic_test_cube(n_levels=10) + self.check_cube( + cube, shape=(1, 10, 866), location="face", level="full" + ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py index 73ec983ddc..1efc73700b 100644 --- a/lib/iris/tests/unit/util/test_reverse.py +++ b/lib/iris/tests/unit/util/test_reverse.py @@ -173,7 +173,9 @@ def test_cube_coord(self): cube1_reverse_spanning.coord("spanning").points, ) - msg = "Expected to find exactly 1 latitude coordinate, but found none." + msg = ( + "Expected to find exactly 1 'latitude' coordinate, but found none." + ) with self.assertRaisesRegex( iris.exceptions.CoordinateNotFoundError, msg ): diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml index fac21560a4..d623da88b3 100644 --- a/requirements/ci/py37.yml +++ b/requirements/ci/py37.yml @@ -14,7 +14,7 @@ dependencies: - cf-units>=2 - cftime<1.3.0 - dask>=2 - - matplotlib + - matplotlib=3.3.4 - netcdf4 - numpy>=1.14 - python-xxhash @@ -29,7 +29,6 @@ dependencies: - pandas - pip - python-stratify - - pyugrid # Test dependencies. 
- asv diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index 4be43fdba6..0225934765 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -14,7 +14,7 @@ dependencies: - cf-units>=2 - cftime<1.3.0 - dask>=2 - - matplotlib + - matplotlib=3.3.4 - netcdf4 - numpy>=1.14 - python-xxhash @@ -29,7 +29,6 @@ dependencies: - pandas - pip - python-stratify - - pyugrid # Test dependencies. - asv