diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py
index a23d47a2e0..7d2ae2a3ad 100644
--- a/lib/iris/_lazy_data.py
+++ b/lib/iris/_lazy_data.py
@@ -72,8 +72,8 @@ def _optimum_chunksize_internals(
 
     Args:
 
-    * chunks (tuple of int, or None):
-        Pre-existing chunk shape of the target data : None if unknown.
+    * chunks (tuple of int):
+        Pre-existing chunk shape of the target data.
     * shape (tuple of int):
        The full array shape of the target data.
     * limit (int):
@@ -106,8 +106,6 @@ def _optimum_chunksize_internals(
         "chunks = [c[0] for c in normalise_chunks('auto', ...)]".
 
     """
-    if chunks is None:
-        chunks = list(shape)
 
     # Set the chunksize limit.
     if limit is None:
diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py
index 107415c79d..b7c1c80cf4 100644
--- a/lib/iris/fileformats/netcdf/loader.py
+++ b/lib/iris/fileformats/netcdf/loader.py
@@ -780,7 +780,13 @@ def set(
     @contextmanager
     def from_file(self) -> None:
         """
-        Ensures the chunks are loaded in from file variables, else will throw an error.
+        Ensures the chunksizes are loaded in from NetCDF file variables.
+
+        Raises
+        ------
+        KeyError
+            If any NetCDF data variables - those that become
+            :class:`~iris.cube.Cube`\\ s - do not specify chunksizes.
 
         Notes
         -----
@@ -798,7 +804,7 @@ def from_file(self) -> None:
     @contextmanager
     def as_dask(self) -> None:
         """
-        Ensures the chunks are decided from dask.
+        Uses Dask :external+dask:doc:`array` to control chunksizes.
 
         Notes
         -----
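
For context, a minimal usage sketch of the two context managers touched by this diff. It assumes they are exposed on the `CHUNK_CONTROL` object in `iris.fileformats.netcdf.loader` (that name does not appear in the hunks above, so treat it as an assumption), and uses a hypothetical filename; the behaviour described is taken from the updated docstrings.

```python
# Sketch only: CHUNK_CONTROL as the assumed owner of from_file() / as_dask(),
# and "my_data.nc" as a hypothetical input file.
import iris
from iris.fileformats.netcdf.loader import CHUNK_CONTROL

# Take chunksizes directly from the NetCDF file variables.
# Per the new docstring, this raises KeyError if a data variable
# (one that becomes a Cube) does not specify chunksizes.
with CHUNK_CONTROL.from_file():
    cube_from_file = iris.load_cube("my_data.nc")

# Defer chunking decisions to Dask's own "auto" chunking instead.
with CHUNK_CONTROL.as_dask():
    cube_dask = iris.load_cube("my_data.nc")
```

This also illustrates why the `chunks is None` fallback is removed from `_optimum_chunksize_internals`: with the context managers deciding how chunksizes are sourced, callers are expected to pass an explicit pre-existing chunk shape rather than `None`.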