20 changes: 8 additions & 12 deletions xarray/backends/zarr.py
@@ -447,10 +447,11 @@ def extract_zarr_variable_encoding(

safe_to_drop = {"source", "original_shape", "preferred_chunks"}
valid_encodings = {
"codecs",
"chunks",
"compressor",
"compressor", # TODO: delete when min zarr >=3
"compressors",
"filters",
"serializer",
"cache_metadata",
"write_empty_chunks",
}
@@ -480,7 +481,7 @@ def extract_zarr_variable_encoding(
mode=mode,
shape=shape,
)
encoding["chunks"] = chunks
encoding["chunks"] = chunks or "auto"
return encoding
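Note: the sketch below is not part of the diff; it illustrates the new fallback, assuming zarr-python treats the string "auto" as "choose chunks for me" wherever explicit chunk sizes are optional. The zarr_format=2 CI failures further down this page ("ValueError: too many dimensions in chunks") and the `assert 'auto' is None` failure in test_extract_zarr_variable_encoding are consistent with "auto" now reaching code paths that previously received `None`.

```python
# Minimal sketch of the fallback added above; `normalize_chunks` is a
# hypothetical helper name used only for illustration, not a function from the PR.
def normalize_chunks(chunks):
    # None and the empty tuple are both falsy, so either one collapses to "auto"
    return chunks or "auto"

assert normalize_chunks(None) == "auto"
assert normalize_chunks(()) == "auto"
assert normalize_chunks((10, 10)) == (10, 10)
```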


@@ -816,22 +817,17 @@ def open_store_variable(self, name):
)
attributes = dict(attributes)

# TODO: this should not be needed once
# https://github.com/zarr-developers/zarr-python/issues/1269 is resolved.
attributes.pop("filters", None)

encoding = {
"chunks": zarr_array.chunks,
"preferred_chunks": dict(zip(dimensions, zarr_array.chunks, strict=True)),
}

if _zarr_v3() and zarr_array.metadata.zarr_format == 3:
encoding["codecs"] = [x.to_dict() for x in zarr_array.metadata.codecs]
elif _zarr_v3():
if _zarr_v3():
encoding.update(
{
"compressor": zarr_array.metadata.compressor,
"filters": zarr_array.metadata.filters,
"compressors": zarr_array.compressors,
"filters": zarr_array.filters,
# "serializer": zarr_array.serializer,
}
)
else:
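Note: a hedged sketch (not part of the diff) of how the reworked `open_store_variable` surfaces compression settings once zarr-python >= 3 is installed; the encoding is expected to expose the array's `compressors` and `filters` directly rather than a zarr v3 `codecs` list. The store path and variable name are placeholders.

```python
# Hypothetical usage; "example.zarr" and "var1" are placeholders, not from the PR.
import xarray as xr

ds = xr.open_zarr("example.zarr")
enc = ds["var1"].encoding
print(enc["chunks"])           # chunk shape read back from the zarr array
print(enc.get("compressors"))  # compressor codecs, as exposed by zarr-python >= 3
print(enc.get("filters"))      # filters, if any were configured on the array
```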
64 changes: 27 additions & 37 deletions xarray/tests/test_backends.py
@@ -134,21 +134,21 @@


@pytest.fixture(scope="module", params=ZARR_FORMATS)
def default_zarr_version(request) -> Generator[None, None]:
def default_zarr_format(request) -> Generator[None, None]:
if has_zarr_v3:
with zarr.config.set(default_zarr_version=request.param):
with zarr.config.set(default_zarr_format=request.param):
yield
else:
yield


def skip_if_zarr_format_3(reason: str):
if has_zarr_v3 and zarr.config["default_zarr_version"] == 3:
if has_zarr_v3 and zarr.config["default_zarr_format"] == 3:
pytest.skip(reason=f"Unsupported with zarr_format=3: {reason}")


def skip_if_zarr_format_2(reason: str):
if not has_zarr_v3 or (zarr.config["default_zarr_version"] == 2):
if not has_zarr_v3 or (zarr.config["default_zarr_format"] == 2):
pytest.skip(reason=f"Unsupported with zarr_format=2: {reason}")


@@ -411,7 +411,7 @@
def test_write_store(self) -> None:
expected = create_test_data()
with self.create_store() as store:
expected.dump_to_store(store)

Check failure on line 414: TestZarrWriteEmpty.test_write_store[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.12 all-but-numba)
# we need to cf decode the store because it has time and
# non-dimension coordinates
with xr.decode_cf(store) as actual:
@@ -438,7 +438,7 @@

def test_roundtrip_test_data(self) -> None:
expected = create_test_data()
with self.roundtrip(expected) as actual:

Check failure on line 441: TestZarrWriteEmpty.test_roundtrip_test_data[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.12 all-but-numba)
self.check_dtypes_roundtripped(expected, actual)
assert_identical(expected, actual)

@@ -461,7 +461,7 @@

with pytest.raises(AssertionError):
# make sure the contextmanager works!
with assert_loads() as ds:

Check failure on line 464: TestZarrWriteEmpty.test_load[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.12 all-but-numba)
pass

with assert_loads() as ds:
@@ -478,7 +478,7 @@
def test_dataset_compute(self) -> None:
expected = create_test_data()

with self.roundtrip(expected) as actual:

Check failure on line 481: TestZarrWriteEmpty.test_dataset_compute[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.12 all-but-numba)
# Test Dataset.compute()
for k, v in actual.variables.items():
# IndexVariables are eagerly cached
@@ -629,12 +629,12 @@
def test_roundtrip_timedelta_data(self) -> None:
time_deltas = pd.to_timedelta(["1h", "2h", "NaT"]) # type: ignore[arg-type, unused-ignore]
expected = Dataset({"td": ("td", time_deltas), "td0": time_deltas[0]})
with self.roundtrip(expected) as actual:

Check failure on line 632: TestZarrWriteEmpty.test_roundtrip_timedelta_data[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
assert_identical(expected, actual)

def test_roundtrip_float64_data(self) -> None:
expected = Dataset({"x": ("y", np.array([1.0, 2.0, np.pi], dtype="float64"))})
with self.roundtrip(expected) as actual:

Check failure on line 637: TestZarrWriteEmpty.test_roundtrip_float64_data[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
assert_identical(expected, actual)

@requires_netcdf
@@ -652,7 +652,7 @@
{"foo": ("x", [0, 1])}, {"x": [2, 3], "y": ("a", [42]), "z": ("x", [4, 5])}
)

with self.roundtrip(original) as actual:

Check failure on line 655: TestZarrWriteEmpty.test_roundtrip_coordinates[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
assert_identical(original, actual)

original["foo"].encoding["coordinates"] = "y"
@@ -667,7 +667,7 @@
original = Dataset(
{"foo": ("x", [0, 1])}, {"x": [2, 3], "y": ("a", [42]), "z": ("x", [4, 5])}
)
with self.roundtrip(original) as actual:

Check failure on line 670: TestZarrWriteEmpty.test_roundtrip_global_coordinates[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
assert_identical(original, actual)

# test that global "coordinates" is as expected
@@ -684,13 +684,13 @@
original = Dataset(coords={"x": 0, "y z": 1})
expected = Dataset({"y z": 1}, {"x": 0})
with pytest.warns(SerializationWarning):
with self.roundtrip(original) as actual:

Check failure on line 687: TestZarrWriteEmpty.test_roundtrip_coordinates_with_space[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
assert_identical(expected, actual)

def test_roundtrip_boolean_dtype(self) -> None:
original = create_boolean_data()
assert original["x"].dtype == "bool"
with self.roundtrip(original) as actual:

Check failure on line 693: TestZarrWriteEmpty.test_roundtrip_boolean_dtype[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
assert_identical(original, actual)
assert actual["x"].dtype == "bool"
# this checks for preserving dtype during second roundtrip
@@ -701,7 +701,7 @@

def test_orthogonal_indexing(self) -> None:
in_memory = create_test_data()
with self.roundtrip(in_memory) as on_disk:

Check failure on line 704: TestZarrWriteEmpty.test_orthogonal_indexing[2] ValueError: too many dimensions in chunks (GitHub Actions / macos-latest py3.10, macos-latest py3.12)
indexers = {"dim1": [1, 2, 0], "dim2": [3, 2, 0, 3], "dim3": np.arange(5)}
expected = in_memory.isel(indexers)
actual = on_disk.isel(**indexers)
@@ -2270,7 +2270,7 @@


@requires_zarr
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
class ZarrBase(CFEncodedBase):
DIMENSION_KEY = "_ARRAY_DIMENSIONS"
zarr_version = 2
@@ -2675,40 +2675,35 @@
assert_identical(original, actual)

def test_compressor_encoding(self) -> None:
from numcodecs.blosc import Blosc

original = create_test_data()
# specify a custom compressor

if has_zarr_v3 and zarr.config.config["default_zarr_version"] == 3:
encoding_key = "codecs"
if has_zarr_v3 and zarr.config.config["default_zarr_format"] == 3:
encoding_key = "compressors"
# all parameters need to be explicitly specified in order for the comparison to pass below
encoding = {
"serializer": zarr.codecs.BytesCodec(endian="little"),
encoding_key: (
zarr.codecs.BytesCodec(endian="little"),
zarr.codecs.BloscCodec(
Blosc(
cname="zstd",
clevel=3,
shuffle="shuffle",
typesize=8,
blocksize=0,
),
)
),
}
else:
from numcodecs.blosc import Blosc

encoding_key = "compressor"
encoding = {encoding_key: Blosc(cname="zstd", clevel=3, shuffle=2)}
encoding_key = "compressors" if has_zarr_v3 else "compressor"
encoding = {encoding_key: (Blosc(cname="zstd", clevel=3, shuffle=2),)}

save_kwargs = dict(encoding={"var1": encoding})

with self.roundtrip(original, save_kwargs=save_kwargs) as ds:
enc = ds["var1"].encoding[encoding_key]
if has_zarr_v3 and zarr.config.config["default_zarr_version"] == 3:
# TODO: figure out a cleaner way to do this comparison
codecs = zarr.core.metadata.v3.parse_codecs(enc)
assert codecs == encoding[encoding_key]
else:
assert enc == encoding[encoding_key]
assert enc == encoding[encoding_key]
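Note: a hedged usage sketch (not part of the diff) of the encoding key this test now exercises; with zarr-python 3 installed, a custom compressor is passed as a tuple under "compressors", while older installs keep the scalar "compressor" key. The dataset contents and store path are placeholders.

```python
# Hypothetical example; the dataset and "example.zarr" are placeholders, not from the PR.
import numpy as np
import xarray as xr
from numcodecs.blosc import Blosc

ds = xr.Dataset({"var1": ("x", np.arange(10))})
encoding = {"var1": {"compressors": (Blosc(cname="zstd", clevel=3, shuffle=2),)}}
ds.to_zarr("example.zarr", mode="w", encoding=encoding)
```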

def test_group(self) -> None:
original = create_test_data()
@@ -2846,14 +2841,9 @@
import numcodecs

encoding_value: Any
if has_zarr_v3 and zarr.config.config["default_zarr_version"] == 3:
compressor = zarr.codecs.BloscCodec()
encoding_key = "codecs"
encoding_value = [zarr.codecs.BytesCodec(), compressor]
else:
compressor = numcodecs.Blosc()
encoding_key = "compressor"
encoding_value = compressor
compressor = numcodecs.Blosc()
encoding_key = "compressors" if has_zarr_v3 else "compressor"
encoding_value = compressor

encoding = {"da": {encoding_key: encoding_value}}
ds.to_zarr(store_target, mode="w", encoding=encoding, **self.version_kwargs)
@@ -3251,7 +3241,7 @@
obj.attrs["good"] = {"key": "value"}
ds = obj if isinstance(obj, Dataset) else obj.to_dataset()
with self.create_zarr_target() as store_target:
ds.to_zarr(store_target, **self.version_kwargs)

Check failure on line 3244: TestZarrWriteEmpty.test_attributes[2-obj1] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.11 all-but-dask)
assert_identical(ds, xr.open_zarr(store_target, **self.version_kwargs))

obj.attrs["bad"] = DataArray()
@@ -3697,7 +3687,7 @@
)

with self.create_zarr_target() as store, patched as mock:
ds.to_zarr(store, mode="w")

Check failure on line 3690: TestZarrWriteEmpty.test_avoid_excess_metadata_calls[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

# We expect this to request array metadata information, so call_count should be == 1,
xrds = xr.open_zarr(store)
@@ -5488,7 +5478,7 @@
self.skip_if_zarr_python_3_and_zip_store(tmp_store)
original_da = DataArray(np.arange(12).reshape((3, 4)))

original_da.to_zarr(tmp_store)

Check failure on line 5481: TestDataArrayToZarr.test_dataarray_to_zarr_no_name[tmp_path], [ZipStore], [Dict] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.12 all-but-numba)

with open_dataarray(tmp_store, engine="zarr") as loaded_da:
assert_identical(original_da, loaded_da)
@@ -5497,7 +5487,7 @@
self.skip_if_zarr_python_3_and_zip_store(tmp_store)
original_da = DataArray(np.arange(12).reshape((3, 4)), name="test")

original_da.to_zarr(tmp_store)

Check failure on line 5490: TestDataArrayToZarr.test_dataarray_to_zarr_with_name[tmp_path], [ZipStore], [Dict] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.12 all-but-numba)

with open_dataarray(tmp_store, engine="zarr") as loaded_da:
assert_identical(original_da, loaded_da)
@@ -5786,7 +5776,7 @@
var = xr.Variable("x", [1, 2])
actual = backends.zarr.extract_zarr_variable_encoding(var)
assert "chunks" in actual
assert actual["chunks"] is None

Check failure on line 5779: test_extract_zarr_variable_encoding AssertionError: assert 'auto' is None (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)

var = xr.Variable("x", [1, 2], encoding={"chunks": (1,)})
actual = backends.zarr.extract_zarr_variable_encoding(var)
@@ -5819,7 +5809,7 @@

m = fsspec.filesystem("memory")
mm = m.get_mapper("out1.zarr")
ds.to_zarr(mm) # old interface

Check failure on line 5812: test_open_fsspec ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
ds0 = ds.copy()
ds0["time"] = ds.time + pd.to_timedelta("1 day")
mm = m.get_mapper("out2.zarr")
@@ -6072,7 +6062,7 @@
with create_tmp_file(suffix=".zarr") as tmp:
ds = self._create_nczarr(tmp)
expected = ds[["var1"]]
expected.to_zarr(tmp, mode="w")

Check failure on line 6065: TestNCZarr.test_overwriting_nczarr ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10, ubuntu-latest py3.12, macos-latest py3.10, macos-latest py3.12)
actual = xr.open_zarr(tmp, consolidated=False)
assert_identical(expected, actual)

@@ -6092,25 +6082,25 @@

@requires_netCDF4
@requires_dask
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_pickle_open_mfdataset_dataset():
with open_example_mfdataset(["bears.nc"]) as ds:
assert_identical(ds, pickle.loads(pickle.dumps(ds)))


@requires_zarr
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_closing_internal_zip_store():
store_name = "tmp.zarr.zip"
original_da = DataArray(np.arange(12).reshape((3, 4)))
original_da.to_zarr(store_name, mode="w")

Check failure on line 6096: test_zarr_closing_internal_zip_store[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

with open_dataarray(store_name, engine="zarr") as loaded_da:
assert_identical(original_da, loaded_da)


@requires_zarr
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
class TestZarrRegionAuto:
def test_zarr_region_auto_all(self, tmp_path):
x = np.arange(0, 50, 10)
@@ -6125,7 +6115,7 @@
)
}
)
ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6118: TestZarrRegionAuto.test_zarr_region_auto_all[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

ds_region = 1 + ds.isel(x=slice(2, 4), y=slice(6, 8))
ds_region.to_zarr(tmp_path / "test.zarr", region="auto")
Expand All @@ -6149,7 +6139,7 @@
)
}
)
ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6142: TestZarrRegionAuto.test_zarr_region_auto_mixed[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

ds_region = 1 + ds.isel(x=slice(2, 4), y=slice(6, 8))
ds_region.to_zarr(
Expand All @@ -6175,7 +6165,7 @@
)
}
)
ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6168: TestZarrRegionAuto.test_zarr_region_auto_noncontiguous[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

ds_region = 1 + ds.isel(x=[0, 2, 3], y=[5, 6])
with pytest.raises(ValueError):
Expand All @@ -6194,7 +6184,7 @@
)
}
)
ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6187: TestZarrRegionAuto.test_zarr_region_auto_new_coord_vals[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

x = np.arange(5, 55, 10)
y = np.arange(0, 20, 2)
Expand Down Expand Up @@ -6230,7 +6220,7 @@
region_slice = dict(x=slice(2, 4), y=slice(6, 8))
ds_region = 1 + ds.isel(region_slice)

ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6223: TestZarrRegionAuto.test_zarr_region_index_write[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

region: Mapping[str, slice] | Literal["auto"]
for region in [region_slice, "auto"]: # type: ignore[assignment]
Expand Down Expand Up @@ -6262,7 +6252,7 @@
)
}
)
ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6255: TestZarrRegionAuto.test_zarr_region_append[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

x_new = np.arange(40, 70, 10)
data_new = np.ones((3, 10))
Expand All @@ -6286,7 +6276,7 @@


@requires_zarr
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_region(tmp_path):
x = np.arange(0, 50, 10)
y = np.arange(0, 20, 2)
@@ -6300,7 +6290,7 @@
)
}
)
ds.to_zarr(tmp_path / "test.zarr")

Check failure on line 6293: test_zarr_region[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps, ubuntu-latest py3.11 all-but-dask)

ds_transposed = ds.transpose("y", "x")

@@ -6315,7 +6305,7 @@

@requires_zarr
@requires_dask
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_region_chunk_partial(tmp_path):
"""
Check that writing to partial chunks with `region` fails, assuming `safe_chunks=False`.
@@ -6336,7 +6326,7 @@

@requires_zarr
@requires_dask
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_append_chunk_partial(tmp_path):
t_coords = np.array([np.datetime64("2020-01-01").astype("datetime64[ns]")])
data = np.ones((10, 10))
@@ -6347,7 +6337,7 @@
coords={"time": t_coords},
name="foo",
)
da.to_zarr(tmp_path / "foo.zarr", mode="w", encoding={"foo": {"chunks": (5, 5, 1)}})

Check failure on line 6340: test_zarr_append_chunk_partial[2] ValueError: too many dimensions in chunks (GitHub Actions / ubuntu-latest py3.10 min-all-deps)

new_time = np.array([np.datetime64("2021-01-01").astype("datetime64[ns]")])

@@ -6374,7 +6364,7 @@

@requires_zarr
@requires_dask
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_region_chunk_partial_offset(tmp_path):
# https://github.com/pydata/xarray/pull/8459#issuecomment-1819417545
store = tmp_path / "foo.zarr"
@@ -6394,7 +6384,7 @@

@requires_zarr
@requires_dask
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_safe_chunk_append_dim(tmp_path):
store = tmp_path / "foo.zarr"
data = np.ones((20,))
@@ -6445,7 +6435,7 @@

@requires_zarr
@requires_dask
@pytest.mark.usefixtures("default_zarr_version")
@pytest.mark.usefixtures("default_zarr_format")
def test_zarr_safe_chunk_region(tmp_path):
store = tmp_path / "foo.zarr"
