Merged

90 commits
0a2a49e
fixing compatibility with relative paths in open_store function withi…
aladinor Feb 2, 2025
ae80662
fixing/refactoring test to be compatible with Zarr-python v3
aladinor Feb 3, 2025
379db18
adding @requires_zarr_v3 decorator to TestZarrDatatreeIO
aladinor Feb 3, 2025
846dc50
replacing 0 with 1 in _create_test_datatree which will write a chunk
aladinor Feb 3, 2025
ddfd0b5
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 3, 2025
3f9a8fb
fixing issues with groups
aladinor Feb 3, 2025
f140658
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 3, 2025
0e790eb
Merge branch 'main' into dtree-zarrv3
aladinor Feb 3, 2025
403afa9
fixing issue with dict creation
aladinor Feb 3, 2025
58e8f8e
Merge branch 'dtree-zarrv3' of https://github.com/aladinor/xarray int…
aladinor Feb 3, 2025
fd357fa
fixing issues with Mypy
aladinor Feb 3, 2025
8b993a1
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 3, 2025
d4aeeca
refactoring open_store in ZarrStore class to use Zarr.core.group.Grou…
aladinor Feb 3, 2025
3125647
refactoring datatree test for zarr ensuring compatibility with zarr-pyt…
aladinor Feb 3, 2025
0c7485b
importing zarr.core.group only inside open_store function
aladinor Feb 3, 2025
fdeee94
documenting changes in whats-new.rst file
aladinor Feb 3, 2025
f3e2c66
Update xarray/backends/zarr.py
aladinor Feb 4, 2025
f9f1043
keeping group creation compatible with zarr v2
aladinor Feb 6, 2025
c118841
Merge branch 'main' into dtree-zarrv3
aladinor Feb 6, 2025
ec2086a
fixing issue with mypy
aladinor Feb 6, 2025
abaea4e
Merge branch 'main' into dtree-zarrv3
aladinor Feb 12, 2025
aa85bed
Merge branch 'main' into dtree-zarrv3
aladinor Feb 12, 2025
fce2957
adding root_path equal to '/' when opening group in zarr v3 to avoid …
aladinor Feb 12, 2025
e27b4b9
fixing tests accordingly
aladinor Feb 12, 2025
d03b003
Merge branch 'dtree-zarrv3' of https://github.com/aladinor/xarray int…
aladinor Feb 12, 2025
810a623
removing print statement
aladinor Feb 12, 2025
eabcc76
Merge branch 'main' into dtree-zarrv3
aladinor Feb 21, 2025
60e19d9
Merge branch 'main' into dtree-zarrv3
aladinor Feb 26, 2025
0934461
reverting changes made in unaligned test in zarr
aladinor Mar 5, 2025
6a74275
Merge branch 'main' into dtree-zarrv3
aladinor Mar 5, 2025
011f29c
adding requires_zarr_v3 decorator
aladinor Mar 5, 2025
e31c646
changing max_depth=None in Group.members to get all nested groups
aladinor Mar 6, 2025
e65f229
fixing unaligned test in datatrees using zarr
aladinor Mar 6, 2025
9c88b26
Merge branch 'main' into dtree-zarrv3
dcherian Mar 7, 2025
5a668a4
Merge branch 'main' into dtree-zarrv3
aladinor Mar 7, 2025
53a9309
Merge branch 'main' into dtree-zarrv3
aladinor Mar 7, 2025
72c1ad6
Merge branch 'main' into dtree-zarrv3
aladinor Mar 7, 2025
502981c
Update xarray/backends/zarr.py
aladinor Mar 11, 2025
2f94763
Merge branch 'main' into dtree-zarrv3
aladinor Mar 12, 2025
3e09b61
Merge branch 'main' into dtree-zarrv3
aladinor Mar 13, 2025
d5a061e
updating whats-new.rst entry
aladinor Mar 13, 2025
a417371
remove funny-looking line and refactor to ensure reading consolidated…
TomNicholas Mar 13, 2025
8756919
parametrize over whether or not we write consolidated metadata
TomNicholas Mar 13, 2025
b85d70d
fix consolidated metadata
TomNicholas Mar 13, 2025
f1cc331
ian changes
ianhi Mar 13, 2025
296ed03
open_datatree_specific_group consolidated true works
ianhi Mar 13, 2025
46c61ca
refactoring
aladinor Mar 13, 2025
77e68e3
Merge branch 'main' into dtree-zarrv3
aladinor Mar 13, 2025
4da72ae
test: add consolidated parametrize to zarr datatree test
ianhi Mar 13, 2025
5f7c6b9
fix: group finding behavior consolidated
ianhi Mar 13, 2025
5dc7df7
Merge remote-tracking branch 'ianhi/aladinor/ian/updates' into dtree_…
TomNicholas Mar 17, 2025
9823d64
remove more debugging print statements
TomNicholas Mar 17, 2025
980ebb4
Merge branch 'dtree-zarrv3' into dtree-zarrv3-2
TomNicholas Mar 17, 2025
30f5bba
revert changes to test fixture
TomNicholas Mar 18, 2025
4d1fdb5
formatting
TomNicholas Mar 18, 2025
ecef578
add decorator to parametrize over zarr formats
TomNicholas Mar 18, 2025
c2a1f5f
ensure both versions of zarr-python and both versions of zarr-python …
TomNicholas Mar 18, 2025
cde6b65
change datatree fixture to not produce values that would be fill_valu…
TomNicholas Mar 18, 2025
09fad6e
refactor test to make expected behaviour clearer
TomNicholas Mar 18, 2025
77575b5
fix wrongly expected behaviour - should not expect inherited variable…
TomNicholas Mar 19, 2025
0a9f874
make arrays no longer scalars to dodge https://github.com/pydata/xarr…
TomNicholas Mar 19, 2025
565938b
Merge branch 'dtree-zarrv3-2' of https://github.com/TomNicholas/xarra…
TomNicholas Mar 19, 2025
daf0f42
fix bad merge
TomNicholas Mar 19, 2025
84bde40
parametrize almost every test over zarr_format
TomNicholas Mar 19, 2025
04d937c
parametrize encoding test over zarr_formats
TomNicholas Mar 19, 2025
765c5f0
use xfail in encoding test
TomNicholas Mar 19, 2025
7eee31c
updated expected behaviour of zarr on-disk in light of https://github…
TomNicholas Mar 19, 2025
0969422
fully revert change to simple_datatree test fixture by considering zar…
TomNicholas Mar 19, 2025
cacf419
parametrize unaligned_zarr test fixture over zarr_format
TomNicholas Mar 19, 2025
1a60ebe
move parametrize_over_zarr_format decorator to apply to entire test c…
TomNicholas Mar 19, 2025
d98abe3
for now explicitly consolidate metadata in test fixture
TomNicholas Mar 19, 2025
2dcefe4
correct bug in writing of consolidated metadata
TomNicholas Mar 19, 2025
a88e503
delete commented-out lines
TomNicholas Mar 19, 2025
22ac9b4
merges from main
TomNicholas Mar 19, 2025
69dc976
Revert "merges from main"
TomNicholas Mar 19, 2025
6e3e2aa
fix encodings test for zarr_format=3
TomNicholas Mar 19, 2025
6ce9578
tidy up
TomNicholas Mar 19, 2025
94f0ddc
Merge pull request #1 from TomNicholas/dtree-zarrv3-2
TomNicholas Mar 19, 2025
8573740
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 19, 2025
e2a58e8
Merge branch 'main' into dtree-zarrv3
TomNicholas Mar 19, 2025
71288c6
account for different default value of write_empty_chunks between zar…
TomNicholas Mar 19, 2025
47f3315
fix expected encoding key for compressor in zarr-python v2
TomNicholas Mar 19, 2025
2b50a97
account for exception type changing
TomNicholas Mar 19, 2025
59a978d
various typing fixes
TomNicholas Mar 19, 2025
fc368ce
Merge branch 'dtree-zarrv3' into dtree-zarrv3-2
TomNicholas Mar 19, 2025
cd6aad6
Merge pull request #2 from TomNicholas/dtree-zarrv3-2
TomNicholas Mar 19, 2025
3fb0b7f
remove outdated comment
TomNicholas Mar 19, 2025
e06fa25
bool type
TomNicholas Mar 20, 2025
0829c68
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 20, 2025
ee4273d
Merge branch 'main' into dtree-zarrv3
dcherian Mar 20, 2025
3 changes: 2 additions & 1 deletion doc/whats-new.rst
@@ -33,7 +33,8 @@ Deprecations

Bug fixes
~~~~~~~~~

- Fix incompatibilities between ``open_datatree`` and Zarr-Python V3, along with refactoring ``TestZarrDatatreeIO`` (:issue:`9960`, :pull:`10020`).
By `Alfonso Ladino-Rincon <https://github.com/aladinor>`_.

Documentation
~~~~~~~~~~~~~
26 changes: 12 additions & 14 deletions xarray/backends/zarr.py
@@ -655,10 +655,18 @@ def open_store(
use_zarr_fill_value_as_mask=use_zarr_fill_value_as_mask,
zarr_format=zarr_format,
)
group_paths = list(_iter_zarr_groups(zarr_group, parent=group))
from zarr.core.group import Group

Member:
What do you think about moving this if/else into _iter_zarr_groups and handling the zarr2/3 difference there?

Contributor Author (aladinor):
This will make the code much cleaner!
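For reference, a rough sketch of what folding that version check into `_iter_zarr_groups` itself could look like (illustrative only, not the code merged in this PR; it assumes the module-local `_zarr_v3()` helper and zarr-python v3's `Group.members(max_depth=None)`, which yields `(path, member)` pairs):

```python
# Sketch: handle the zarr-python v2/v3 difference inside _iter_zarr_groups itself.
def _iter_zarr_groups(root, parent: str = "/"):
    yield parent
    if _zarr_v3():  # module-local helper in xarray/backends/zarr.py
        from zarr import Group

        # v3: members(max_depth=None) walks every nested group in one call
        for path, member in root.members(max_depth=None):
            if isinstance(member, Group):
                yield f"{parent.rstrip('/')}/{path}"
    else:
        # v2: recurse through .groups(), as the previous implementation did
        for path, group in root.groups():
            yield from _iter_zarr_groups(group, parent=f"{parent.rstrip('/')}/{path}")
```

The merged diff below takes a different route and keeps the logic inline in `open_store`, building a `group_members` dict from `zarr_group.members()`.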

group_members: dict = dict(zarr_group.members(max_depth=1000))
group_members = {
(f"{group}/{path}" if group != "/" else path): group_store
for path, group_store in group_members.items()
if isinstance(group_store, Group)
}
group_members[group] = zarr_group
return {
group: cls(
zarr_group.get(group),
group_store,
mode,
consolidate_on_close,
append_dim,
@@ -669,7 +677,7 @@
use_zarr_fill_value_as_mask,
cache_members=cache_members,
)
for group in group_paths
for group, group_store in group_members.items()
}

@classmethod
@@ -1651,8 +1659,6 @@ def open_groups_as_dict(
zarr_version=None,
zarr_format=None,
) -> dict[str, Dataset]:
from xarray.core.treenode import NodePath

filename_or_obj = _normalize_path(filename_or_obj)

# Check for a group and make it a parent if it exists
Expand Down Expand Up @@ -1698,14 +1704,6 @@ def open_groups_as_dict(
return groups_dict


def _iter_zarr_groups(root: ZarrGroup, parent: str = "/") -> Iterable[str]:
parent_nodepath = NodePath(parent)
yield str(parent_nodepath)
for path, group in root.groups():
gpath = parent_nodepath / path
yield from _iter_zarr_groups(group, parent=str(gpath))


def _get_open_params(
store,
mode,
Expand Down Expand Up @@ -1751,7 +1749,7 @@ def _get_open_params(
consolidated = False

if _zarr_v3():
missing_exc = ValueError
missing_exc = AssertionError
else:
missing_exc = zarr.errors.GroupNotFoundError

2 changes: 1 addition & 1 deletion xarray/tests/conftest.py
@@ -191,7 +191,7 @@ def create_test_datatree():
"""

def _create_test_datatree(modify=lambda ds: ds):
set1_data = modify(xr.Dataset({"a": 0, "b": 1}))
set1_data = modify(xr.Dataset({"a": 1, "b": 2}))
Member:
Can this be safely reverted?

Member (@TomNicholas, Mar 19, 2025):
Reverting this breaks the test_to_zarr_compute_false test. I think that's because zarr-python sees the array with a value 0 matching its default fill_value and chooses not to write that chunk as an optimization, but the test expects all (non-dask) arrays to become chunks on disk and so fails.

However, even the new values used by @aladinor have problems - they trigger this bug #10147.

I haven't been able to fully get around these bugs, but none of this is a datatree-specific issue, so I think it is actually okay to change the test fixture...

Member (@TomNicholas, Mar 19, 2025):
I was able to fully revert this once I taught the test assertion to understand the expected zarr behaviour for scalars and for arrays containing only fill_value.

aladinor@0969422
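A minimal sketch of the kind of assertion helper described above (the name and exact rules are assumptions here, not the code in that commit):

```python
import numpy as np


def chunk_expected_on_disk(values, fill_value) -> bool:
    """Return False when zarr may skip writing the chunk because every
    element equals the fill_value (the optimization described above)."""
    arr = np.asarray(values)
    return not np.all(arr == fill_value)
```

A test can then assert that a chunk file exists only when this returns True, rather than unconditionally.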

set2_data = modify(xr.Dataset({"a": ("x", [2, 3]), "b": ("x", [0.1, 0.2])}))
root_data = modify(xr.Dataset({"a": ("y", [6, 7, 8]), "set0": ("x", [9, 10])}))

99 changes: 55 additions & 44 deletions xarray/tests/test_backends_datatree.py
@@ -15,7 +15,7 @@
requires_dask,
requires_h5netcdf,
requires_netCDF4,
requires_zarr,
requires_zarr_v3,
)

if TYPE_CHECKING:
@@ -141,14 +141,16 @@ def unaligned_datatree_zarr(tmp_path_factory):
a (y) int64 16B ...
b (x) float64 16B ...
"""
from zarr import consolidate_metadata

filepath = tmp_path_factory.mktemp("data") / "unaligned_simple_datatree.zarr"
root_data = xr.Dataset({"a": ("y", [6, 7, 8]), "set0": ("x", [9, 10])})
set1_data = xr.Dataset({"a": 0, "b": 1})
set2_data = xr.Dataset({"a": ("y", [2, 3]), "b": ("x", [0.1, 0.2])})
root_data.to_zarr(filepath)
set1_data.to_zarr(filepath, group="/Group1", mode="a")
set2_data.to_zarr(filepath, group="/Group2", mode="a")
set1_data.to_zarr(filepath, group="/Group1/subgroup1", mode="a")
consolidate_metadata(filepath)
yield filepath


@@ -373,15 +375,12 @@ class TestH5NetCDFDatatreeIO(DatatreeIOBase):
engine: T_DataTreeNetcdfEngine | None = "h5netcdf"


@pytest.mark.skipif(
have_zarr_v3, reason="datatree support for zarr 3 is not implemented yet"
)
@requires_zarr
@requires_zarr_v3
Member:
Should this just be @requires_zarr? Does this mean we aren't testing for zarr2 anymore?

Member:
I fixed this in aladinor@c2a1f5f, which ensures we test with both versions of zarr-python and all supported zarr_formats.
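As an illustrative sketch of that idea (the decorator name and exact usage here are assumptions, not necessarily what the commit defines), a single `pytest.mark.parametrize` applied at class level runs every test under both on-disk formats:

```python
import pytest

# assumed name; the commit may define or place this differently
parametrize_over_zarr_format = pytest.mark.parametrize(
    "zarr_format", [2, 3], ids=lambda v: f"zarr_format={v}"
)


@parametrize_over_zarr_format
class TestZarrDatatreeIOExample:
    def test_roundtrip(self, tmpdir, simple_datatree, zarr_format):
        # each test method receives zarr_format and runs once per format
        filepath = str(tmpdir / "test.zarr")
        simple_datatree.to_zarr(filepath, zarr_format=zarr_format)
```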

class TestZarrDatatreeIO:
engine = "zarr"

def test_to_zarr(self, tmpdir, simple_datatree):
filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt = simple_datatree
original_dt.to_zarr(filepath)

@@ -391,16 +390,31 @@ def test_to_zarr(self, tmpdir, simple_datatree):
def test_zarr_encoding(self, tmpdir, simple_datatree):
from numcodecs.blosc import Blosc

filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt = simple_datatree

comp = {"compressor": Blosc(cname="zstd", clevel=3, shuffle=2)}
blosc = Blosc(cname="zstd", clevel=3, shuffle="shuffle").get_config()
comp = {"compressor": {"name": blosc.pop("id"), "configuration": blosc}}
enc = {"/set2": {var: comp for var in original_dt["/set2"].dataset.data_vars}}
original_dt.to_zarr(filepath, encoding=enc)

with open_datatree(filepath, engine="zarr") as roundtrip_dt:
print(roundtrip_dt["/set2/a"].encoding)
assert roundtrip_dt["/set2/a"].encoding["compressor"] == comp["compressor"]
retrieved_compressor = roundtrip_dt["/set2/a"].encoding["compressors"][
0
] # Get the BloscCodec object
assert (
retrieved_compressor.cname.name
== comp["compressor"]["configuration"]["cname"]
)
assert (
retrieved_compressor.clevel
== comp["compressor"]["configuration"]["clevel"]
)
assert (
retrieved_compressor.shuffle.name
== comp["compressor"]["configuration"]["shuffle"]
)

enc["/not/a/group"] = {"foo": "bar"} # type: ignore[dict-item]
with pytest.raises(ValueError, match="unexpected encoding group.*"):
@@ -409,9 +423,9 @@ def test_zarr_encoding(self, tmpdir, simple_datatree):
def test_to_zarr_zip_store(self, tmpdir, simple_datatree):
from zarr.storage import ZipStore

filepath = tmpdir / "test.zarr.zip"
filepath = str(tmpdir / "test.zarr.zip")
original_dt = simple_datatree
store = ZipStore(filepath)
store = ZipStore(filepath, mode="w")
original_dt.to_zarr(store)

with open_datatree(store, engine="zarr") as roundtrip_dt: # type: ignore[arg-type, unused-ignore]
@@ -432,32 +446,29 @@ def test_to_zarr_not_consolidated(self, tmpdir, simple_datatree):
assert_equal(original_dt, roundtrip_dt)

def test_to_zarr_default_write_mode(self, tmpdir, simple_datatree):
import zarr

simple_datatree.to_zarr(tmpdir)
simple_datatree.to_zarr(str(tmpdir))

# with default settings, to_zarr should not overwrite an existing dir
with pytest.raises(zarr.errors.ContainsGroupError):
simple_datatree.to_zarr(tmpdir)
with pytest.raises(FileExistsError):
Member:
I feel like this error type shouldn't need to be changed, but again I think it's zarr-python whose behavior changed, see zarr-developers/zarr-python#2821 (comment)

simple_datatree.to_zarr(str(tmpdir))
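One way to keep this assertion robust if the raised exception type differs between zarr-python versions might be something like the sketch below (an assumption, not what the PR settled on; it presumes a boolean flag such as the `have_zarr_v3` the old decorator used, and is meant to sit inside the test body where `simple_datatree` and `tmpdir` are in scope):

```python
import zarr

# pick the exception the installed zarr-python actually raises
expected_exc = FileExistsError if have_zarr_v3 else zarr.errors.ContainsGroupError
with pytest.raises(expected_exc):
    simple_datatree.to_zarr(str(tmpdir))
```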

@requires_dask
def test_to_zarr_compute_false(self, tmpdir, simple_datatree):
import dask.array as da

filepath = tmpdir / "test.zarr"
original_dt = simple_datatree.chunk()
original_dt.to_zarr(filepath, compute=False)
original_dt.to_zarr(str(filepath), compute=False)

for node in original_dt.subtree:
for name, variable in node.dataset.variables.items():
var_dir = filepath / node.path / name
var_files = var_dir.listdir()
assert var_dir / ".zarray" in var_files
assert var_dir / ".zattrs" in var_files
assert var_dir / "zarr.json" in var_files
if isinstance(variable.data, da.Array):
assert var_dir / "0" not in var_files
assert var_dir / "zarr.json" in var_files
else:
assert var_dir / "0" in var_files
assert var_dir / "c" in var_files
Member:
Are these v2 -> v3 changes?

Contributor Author (@aladinor, Mar 17, 2025):
Yep. Those changes I made are designed to work with zarr-python v3.

Member:
So these are good, but they should have been in addition to the tests for zarr_format=2, not instead of them, because otherwise we are no longer testing the ability to write to the v2 zarr format.

I've refactored this test in aladinor@09fad6e to parametrize over zarr_format, and also to try to make the expected behaviour of the test clearer.

Also, there was a bug with inherited coords (aladinor@77575b5).
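To make the format-dependent expectations concrete, here is an illustrative helper (names assumed, not code from those commits) encoding the on-disk differences discussed above:

```python
def expected_var_files(zarr_format: int, has_chunk: bool) -> set[str]:
    """Files expected inside a variable's store directory for each zarr format."""
    if zarr_format == 2:
        files = {".zarray", ".zattrs"}  # v2 per-array metadata files
        chunk_key = "0"                 # v2 stores the first chunk under the key "0"
    else:
        files = {"zarr.json"}           # v3 single metadata document
        chunk_key = "c"                 # v3 nests chunk data under a "c/" prefix
    if has_chunk:
        files.add(chunk_key)
    return files
```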


def test_to_zarr_inherited_coords(self, tmpdir):
original_dt = DataTree.from_dict(
@@ -466,7 +477,7 @@ def test_to_zarr_inherited_coords(self, tmpdir):
"/sub": xr.Dataset({"b": (("x",), [5, 6])}),
}
)
filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt.to_zarr(filepath)

with open_datatree(filepath, engine="zarr") as roundtrip_dt:
@@ -476,7 +487,7 @@

def test_open_groups_round_trip(self, tmpdir, simple_datatree) -> None:
"""Test `open_groups` opens a zarr store with the `simple_datatree` structure."""
filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt = simple_datatree
original_dt.to_zarr(filepath)

@@ -501,7 +512,7 @@ def test_open_datatree(self, unaligned_datatree_zarr) -> None:

@requires_dask
def test_open_datatree_chunks(self, tmpdir, simple_datatree) -> None:
filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")

chunks = {"x": 2, "y": 1}

@@ -527,9 +538,8 @@ def test_open_groups(self, unaligned_datatree_zarr) -> None:
unaligned_dict_of_datasets = open_groups(unaligned_datatree_zarr, engine="zarr")

assert "/" in unaligned_dict_of_datasets.keys()
assert "/Group1" in unaligned_dict_of_datasets.keys()
assert "/Group1/subgroup1" in unaligned_dict_of_datasets.keys()
assert "/Group2" in unaligned_dict_of_datasets.keys()
assert "Group1" in unaligned_dict_of_datasets.keys()
assert "Group2" in unaligned_dict_of_datasets.keys()
# Check that group name returns the correct datasets
with xr.open_dataset(
unaligned_datatree_zarr, group="/", engine="zarr"
@@ -538,22 +548,20 @@
with xr.open_dataset(
unaligned_datatree_zarr, group="Group1", engine="zarr"
) as expected:
assert_identical(unaligned_dict_of_datasets["/Group1"], expected)
with xr.open_dataset(
unaligned_datatree_zarr, group="/Group1/subgroup1", engine="zarr"
) as expected:
assert_identical(unaligned_dict_of_datasets["/Group1/subgroup1"], expected)
assert_identical(unaligned_dict_of_datasets["Group1"], expected)
with xr.open_dataset(
unaligned_datatree_zarr, group="/Group2", engine="zarr"
unaligned_datatree_zarr, group="Group2", engine="zarr"
) as expected:
assert_identical(unaligned_dict_of_datasets["/Group2"], expected)

assert_identical(unaligned_dict_of_datasets["Group2"], expected)
for ds in unaligned_dict_of_datasets.values():
ds.close()

@pytest.mark.filterwarnings(
"ignore:Failed to open Zarr store with consolidated metadata:RuntimeWarning"
)
def test_open_datatree_specific_group(self, tmpdir, simple_datatree) -> None:
"""Test opening a specific group within a Zarr store using `open_datatree`."""
filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
group = "/set2"
original_dt = simple_datatree
original_dt.to_zarr(filepath)
@@ -568,10 +576,7 @@ def test_open_groups_chunks(self, tmpdir) -> None:
"""Test `open_groups` with chunks on a zarr store."""

chunks = {"x": 2, "y": 1}
filepath = tmpdir / "test.zarr"

chunks = {"x": 2, "y": 1}

filepath = str(tmpdir / "test.zarr")
root_data = xr.Dataset({"a": ("y", [6, 7, 8]), "set0": ("x", [9, 10])})
set1_data = xr.Dataset({"a": ("y", [-1, 0, 1]), "b": ("x", [-10, 6])})
set2_data = xr.Dataset({"a": ("y", [1, 2, 3]), "b": ("x", [0.1, 0.2])})
@@ -605,13 +610,16 @@ def test_write_subgroup(self, tmpdir):
expected_dt = original_dt.copy()
expected_dt.name = None

filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt.to_zarr(filepath)

with open_datatree(filepath, engine="zarr") as roundtrip_dt:
assert_equal(original_dt, roundtrip_dt)
assert_identical(expected_dt, roundtrip_dt)

@pytest.mark.filterwarnings(
"ignore:Failed to open Zarr store with consolidated metadata:RuntimeWarning"
)
def test_write_inherited_coords_false(self, tmpdir):
original_dt = DataTree.from_dict(
{
Expand All @@ -620,7 +628,7 @@ def test_write_inherited_coords_false(self, tmpdir):
}
)

filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt.to_zarr(filepath, write_inherited_coords=False)

with open_datatree(filepath, engine="zarr") as roundtrip_dt:
@@ -631,6 +639,9 @@ def test_write_inherited_coords_false(self, tmpdir):
with open_datatree(filepath, group="child", engine="zarr") as roundtrip_child:
assert_identical(expected_child, roundtrip_child)

@pytest.mark.filterwarnings(
"ignore:Failed to open Zarr store with consolidated metadata:RuntimeWarning"
)
def test_write_inherited_coords_true(self, tmpdir):
original_dt = DataTree.from_dict(
{
@@ -639,7 +650,7 @@ def test_write_inherited_coords_true(self, tmpdir):
}
)

filepath = tmpdir / "test.zarr"
filepath = str(tmpdir / "test.zarr")
original_dt.to_zarr(filepath, write_inherited_coords=True)

with open_datatree(filepath, engine="zarr") as roundtrip_dt: