6 changes: 5 additions & 1 deletion docs/src/whatsnew/latest.rst
@@ -56,6 +56,10 @@ This document explains the changes made to Iris for this release
#. `@bouweandela`_ added the option to specify the Dask chunks of the target
array in :func:`iris.util.broadcast_to_shape`. (:pull:`5620`)

#. `@schlunma`_ allowed :func:`iris.analysis.cartography.area_weights` to
return dask arrays with arbitrary chunks. (:pull:`5658`)


🔥 Deprecations
===============

@@ -90,4 +94,4 @@ This document explains the changes made to Iris for this release


.. comment
Whatsnew resources in alphabetical order:
Whatsnew resources in alphabetical order:
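The new behaviour described in the second entry above can be exercised as follows. This is a minimal sketch that reuses the stock cube and chunk sizes from the new TestLazyAreaWeights test further down; any cube with bounded latitude/longitude coordinates would behave the same way.

import dask.array as da

import iris.analysis.cartography
import iris.tests.stock

# A small (4, 3, 4) cube with bounded lat/lon, as in the new test below.
cube = iris.tests.stock.simple_3d()[[0, 0, 0, 0], :, :]
cube.coord("latitude").guess_bounds()
cube.coord("longitude").guess_bounds()

# Default behaviour is unchanged: a realised numpy array of cell areas.
weights = iris.analysis.cartography.area_weights(cube)

# New: a lazy dask array.  The chunks tuple spans the full cube shape, but
# only the entries for non-lat/lon dimensions take effect.
lazy_weights = iris.analysis.cartography.area_weights(
    cube, compute=False, chunks=(2, 3, 4)
)
assert isinstance(lazy_weights, da.Array)
assert lazy_weights.chunksize == (2, 3, 4)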
21 changes: 18 additions & 3 deletions lib/iris/analysis/cartography.py
@@ -376,7 +376,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth):
return np.abs(areas)


def area_weights(cube, normalize=False):
def area_weights(cube, normalize=False, compute=True, chunks=None):
r"""Return an array of area weights, with the same dimensions as the cube.

This is a 2D lat/lon area weights array, repeated over the non lat/lon
@@ -401,6 +401,13 @@ def area_weights(cube, normalize=False):
normalize : bool, default=False
If False, weights are grid cell areas. If True, weights are grid
cell areas divided by the total grid area.
compute : bool, default=True
If False, return a lazy dask array. If True, return a numpy array.
chunks : tuple, optional
If compute is False and a value is provided, then the result will use
these chunks instead of the same chunks as the cube data. The values
provided here will only be used along dimensions that are not latitude
or longitude.

Returns
-------
@@ -476,7 +483,13 @@

# Create 2D weights from bounds.
# Use the geographical area as the weight for each cell
ll_weights = _quadrant_area(lat.bounds, lon.bounds, radius_of_earth)
if compute:
lat_bounds = lat.bounds
lon_bounds = lon.bounds
else:
lat_bounds = lat.lazy_bounds()
lon_bounds = lon.lazy_bounds()
ll_weights = _quadrant_area(lat_bounds, lon_bounds, radius_of_earth)

# Normalize the weights if necessary.
if normalize:
@@ -491,7 +504,9 @@
if dim is not None:
wshape.append(ll_weights.shape[idim])
ll_weights = ll_weights.reshape(wshape)
broad_weights = iris.util.broadcast_to_shape(ll_weights, cube.shape, broadcast_dims)
broad_weights = iris.util.broadcast_to_shape(
ll_weights, cube.shape, broadcast_dims, chunks=chunks
)

return broad_weights

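For orientation, the lazy path above works in two steps: lat.lazy_bounds() and lon.lazy_bounds() return dask arrays, so _quadrant_area produces lazy 2D weights, and iris.util.broadcast_to_shape (which gained a chunks keyword in :pull:`5620`) spreads them over the remaining cube dimensions. Below is a rough standalone sketch of that last step, with made-up shapes and a plain dask array standing in for the lazy 2D weights.

import dask.array as da

import iris.util

# Lazy stand-in for the 2D lat/lon weights (3 latitudes x 4 longitudes).
ll_weights = da.ones((3, 4))

# Broadcast to a hypothetical (time, lat, lon) = (10, 3, 4) cube shape.
# The dim map (1, 2) states that the weights' axes correspond to cube
# dimensions 1 and 2; chunks is expected to control the chunking of the
# result along the broadcast (time) dimension only.
broad_weights = iris.util.broadcast_to_shape(
    ll_weights, (10, 3, 4), (1, 2), chunks=(5, 3, 4)
)
assert broad_weights.shape == (10, 3, 4)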
43 changes: 43 additions & 0 deletions lib/iris/tests/test_analysis.py
@@ -1116,6 +1116,8 @@ def test_rotate_1d(self):

@tests.skip_data
class TestAreaWeights(tests.IrisTest):
# Note: chunks is simply ignored for non-lazy data
@pytest.mark.parametrize("chunks", [None, (2, 3)])
def test_area_weights(self, chunks):
small_cube = iris.tests.stock.simple_pp()
# Get offset, subsampled region: small enough to test against literals
@@ -1155,6 +1157,47 @@
)


@tests.skip_data
class TestLazyAreaWeights:
@pytest.mark.parametrize("normalize", [True, False])
@pytest.mark.parametrize("chunks", [None, (2, 3, 4), (2, 2, 2)])
def test_lazy_area_weights(self, chunks, normalize):
small_cube = iris.tests.stock.simple_3d()[[0, 0, 0, 0], :, :]
small_cube.coord("latitude").guess_bounds()
small_cube.coord("longitude").guess_bounds()

area_weights = iris.analysis.cartography.area_weights(
small_cube,
normalize=normalize,
compute=False,
chunks=chunks,
)

assert isinstance(area_weights, da.Array)

# Check that chunksizes are as expected
if chunks is None:
assert area_weights.chunksize == (4, 3, 4)
else:
assert area_weights.chunksize == (2, 3, 4)

# Check that actual weights are as expected (known good output)
if normalize:
expected_2d = [
[0.03661165, 0.03661165, 0.03661165, 0.03661165],
[0.1767767, 0.1767767, 0.1767767, 0.1767767],
[0.03661165, 0.03661165, 0.03661165, 0.03661165],
]
else:
expected_2d = [
[1.86536150e13, 1.86536150e13, 1.86536150e13, 1.86536150e13],
[9.00676206e13, 9.00676206e13, 9.00676206e13, 9.00676206e13],
[1.86536150e13, 1.86536150e13, 1.86536150e13, 1.86536150e13],
]
expected = np.broadcast_to(expected_2d, (4, 3, 4))
np.testing.assert_allclose(area_weights.compute(), expected)
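A quick sanity check on the normalized literals above: the twelve weights of the expected 2D slice sum to one, which is what normalize=True promises.

# Eight outer-row cells plus four middle-row cells from expected_2d.
total = 8 * 0.03661165 + 4 * 0.1767767
print(round(total, 7))  # 1.0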


@tests.skip_data
class TestAreaWeightGeneration(tests.IrisTest):
def setUp(self):