Skip to content

Commit 83ff577

Browse files
Include issue references for xfailed tests.
1 parent 5b085a6 commit 83ff577

2 files changed

Lines changed: 20 additions & 22 deletions

File tree

virtualizarr/tests/test_readers/conftest.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -205,21 +205,21 @@ def skip_test_for_libhdf5_version():
205205
return libhdf5_version < Version("1.14")
206206

207207

208-
@pytest.fixture(params=["blosc_zlib"])
208+
@pytest.fixture(params=["blosc_zlib", ""])
209209
def filter_encoded_roundtrip_netcdf4_file(
210210
tmpdir, request, skip_test_for_libhdf5_version
211211
):
212212
if skip_test_for_libhdf5_version:
213213
pytest.skip("Requires libhdf5 >= 1.14")
214214
ds = create_test_data(dim_sizes=(20, 80, 10))
215+
encoding_config = {
216+
"chunksizes": (20, 40),
217+
"original_shape": ds.var2.shape,
218+
"blosc_shuffle": 1,
219+
"fletcher32": False,
220+
}
215221
if "blosc" in request.param:
216-
encoding_config = {
217-
"compression": request.param,
218-
"chunksizes": (20, 40),
219-
"original_shape": ds.var2.shape,
220-
"blosc_shuffle": 1,
221-
"fletcher32": False,
222-
}
222+
encoding_config["compression"] = request.param
223223
# Check on how handle scalar dim.
224224
ds = ds.drop_dims("dim3")
225225
ds["var2"].encoding.update(encoding_config)

virtualizarr/tests/test_readers/test_hdf_integration.py

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -3,50 +3,48 @@
33
import xarray.testing as xrt
44

55
import virtualizarr
6-
from virtualizarr.backend import FileType
76
from virtualizarr.readers.hdf import HDFVirtualBackend
87
from virtualizarr.tests import requires_kerchunk
98

109

1110
@requires_kerchunk
12-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
1311
class TestIntegration:
14-
@pytest.mark.xfail(reason="0 time start is being interpreted as fillvalue")
12+
@pytest.mark.xfail(
13+
reason="0 time start is being interpreted as fillvalue see issues/280"
14+
)
1515
def test_filters_h5netcdf_roundtrip(
16-
self, tmpdir, filter_encoded_roundtrip_hdf5_file, hdf_backend
16+
self, tmpdir, filter_encoded_roundtrip_hdf5_file, backend=HDFVirtualBackend
1717
):
1818
ds = xr.open_dataset(filter_encoded_roundtrip_hdf5_file, decode_times=True)
1919
vds = virtualizarr.open_virtual_dataset(
2020
filter_encoded_roundtrip_hdf5_file,
2121
loadable_variables=["time"],
2222
cftime_variables=["time"],
23-
backend=hdf_backend,
23+
backend=HDFVirtualBackend,
2424
)
2525
kerchunk_file = f"{tmpdir}/kerchunk.json"
2626
vds.virtualize.to_kerchunk(kerchunk_file, format="json")
2727
roundtrip = xr.open_dataset(kerchunk_file, engine="kerchunk", decode_times=True)
2828
xrt.assert_allclose(ds, roundtrip)
2929

30-
@pytest.mark.xfail(reason="Coordinate issue affecting kerchunk and HDF reader.")
30+
@pytest.mark.xfail(
31+
reason="Coordinate issue affecting only hdf reader see pull/#260"
32+
)
3133
def test_filters_netcdf4_roundtrip(
32-
self, tmpdir, filter_encoded_roundtrip_netcdf4_file, hdf_backend
34+
self, tmpdir, filter_encoded_roundtrip_netcdf4_file
3335
):
3436
filepath = filter_encoded_roundtrip_netcdf4_file["filepath"]
3537
ds = xr.open_dataset(filepath)
36-
vds = virtualizarr.open_virtual_dataset(
37-
filepath, filetype=FileType("netcdf4"), backend=hdf_backend
38-
)
38+
vds = virtualizarr.open_virtual_dataset(filepath, backend=HDFVirtualBackend)
3939
kerchunk_file = f"{tmpdir}/kerchunk.json"
4040
vds.virtualize.to_kerchunk(kerchunk_file, format="json")
4141
roundtrip = xr.open_dataset(kerchunk_file, engine="kerchunk")
4242
xrt.assert_equal(ds, roundtrip)
4343

44-
def test_filter_and_cf_roundtrip(
45-
self, tmpdir, filter_and_cf_roundtrip_hdf5_file, hdf_backend
46-
):
44+
def test_filter_and_cf_roundtrip(self, tmpdir, filter_and_cf_roundtrip_hdf5_file):
4745
ds = xr.open_dataset(filter_and_cf_roundtrip_hdf5_file)
4846
vds = virtualizarr.open_virtual_dataset(
49-
filter_and_cf_roundtrip_hdf5_file, backend=hdf_backend
47+
filter_and_cf_roundtrip_hdf5_file, backend=HDFVirtualBackend
5048
)
5149
kerchunk_file = f"{tmpdir}/filter_cf_kerchunk.json"
5250
vds.virtualize.to_kerchunk(kerchunk_file, format="json")

0 commit comments

Comments
 (0)