Skip to content

Commit 07bfacc

Browse files
Explicitly specify HDF5VirtualBackend for test parameter.
1 parent 3e216dc commit 07bfacc

2 files changed

Lines changed: 17 additions & 19 deletions

File tree

virtualizarr/tests/test_backend.py

Lines changed: 11 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
from virtualizarr import open_virtual_dataset
1212
from virtualizarr.backend import FileType, automatically_determine_filetype
1313
from virtualizarr.manifests import ManifestArray
14+
from virtualizarr.readers import HDF5VirtualBackend
1415
from virtualizarr.readers.hdf import HDFVirtualBackend
1516
from virtualizarr.tests import (
1617
has_astropy,
@@ -83,7 +84,7 @@ def test_FileType():
8384

8485

8586
@requires_kerchunk
86-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
87+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
8788
class TestOpenVirtualDatasetIndexes:
8889
def test_no_indexes(self, netcdf4_file, hdf_backend):
8990
vds = open_virtual_dataset(netcdf4_file, indexes={}, backend=hdf_backend)
@@ -115,7 +116,7 @@ def index_mappings_equal(indexes1: Mapping[str, Index], indexes2: Mapping[str, I
115116

116117

117118
@requires_kerchunk
118-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
119+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
119120
def test_cftime_index(tmpdir, hdf_backend):
120121
"""Ensure a virtual dataset contains the same indexes as an Xarray dataset"""
121122
# Note: Test was created to debug: https://github.com/zarr-developers/VirtualiZarr/issues/168
@@ -145,7 +146,7 @@ def test_cftime_index(tmpdir, hdf_backend):
145146

146147

147148
@requires_kerchunk
148-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
149+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
149150
class TestOpenVirtualDatasetAttrs:
150151
def test_drop_array_dimensions(self, netcdf4_file, hdf_backend):
151152
# regression test for GH issue #150
@@ -166,20 +167,16 @@ def test_coordinate_variable_attrs_preserved(self, netcdf4_file, hdf_backend):
166167
@network
167168
@requires_s3fs
168169
class TestReadFromS3:
169-
@pytest.mark.parametrize(
170-
"filetype", ["netcdf4", None], ids=["netcdf4 filetype", "None filetype"]
171-
)
172170
@pytest.mark.parametrize(
173171
"indexes", [None, {}], ids=["None index", "empty dict index"]
174172
)
175-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
176-
def test_anon_read_s3(self, filetype, indexes, hdf_backend):
173+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
174+
def test_anon_read_s3(self, indexes, hdf_backend):
177175
"""Parameterized tests for empty vs supplied indexes and filetypes."""
178176
# TODO: Switch away from this s3 url after minIO is implemented.
179177
fpath = "s3://carbonplan-share/virtualizarr/local.nc"
180178
vds = open_virtual_dataset(
181179
fpath,
182-
filetype=filetype,
183180
indexes=indexes,
184181
reader_options={"storage_options": {"anon": True}},
185182
backend=hdf_backend,
@@ -191,7 +188,7 @@ def test_anon_read_s3(self, filetype, indexes, hdf_backend):
191188

192189

193190
@network
194-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
191+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
195192
class TestReadFromURL:
196193
@pytest.mark.parametrize(
197194
"filetype, url",
@@ -295,7 +292,7 @@ def test_virtualizarr_vs_local_nisar(self, hdf_backend):
295292

296293
@requires_kerchunk
297294
class TestLoadVirtualDataset:
298-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
295+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
299296
def test_loadable_variables(self, netcdf4_file, hdf_backend):
300297
vars_to_load = ["air", "time"]
301298
vds = open_virtual_dataset(
@@ -330,7 +327,7 @@ def test_explicit_filetype_and_backend(self, netcdf4_file):
330327
netcdf4_file, filetype="hdf", backend=HDFVirtualBackend
331328
)
332329

333-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
330+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
334331
def test_group_kwarg(self, hdf5_groups_file, hdf_backend):
335332
if hdf_backend:
336333
with pytest.raises(NotImplementedError, match="Nested groups"):
@@ -376,13 +373,13 @@ def test_open_virtual_dataset_passes_expected_args(
376373
}
377374
mock_read_kerchunk.assert_called_once_with(**args)
378375

379-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
376+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
380377
def test_open_dataset_with_empty(self, hdf5_empty, tmpdir, hdf_backend):
381378
vds = open_virtual_dataset(hdf5_empty, backend=hdf_backend)
382379
assert vds.empty.dims == ()
383380
assert vds.empty.attrs == {"empty": "true"}
384381

385-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
382+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
386383
def test_open_dataset_with_scalar(self, hdf5_scalar, tmpdir, hdf_backend):
387384
vds = open_virtual_dataset(hdf5_scalar, backend=hdf_backend)
388385
assert vds.scalar.dims == ()

virtualizarr/tests/test_integration.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55

66
from virtualizarr import open_virtual_dataset
77
from virtualizarr.manifests import ChunkManifest, ManifestArray
8+
from virtualizarr.readers import HDF5VirtualBackend
89
from virtualizarr.readers.hdf import HDFVirtualBackend
910
from virtualizarr.tests import requires_kerchunk
1011
from virtualizarr.translators.kerchunk import (
@@ -64,7 +65,7 @@ def test_no_duplicates_find_var_names():
6465
),
6566
],
6667
)
67-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
68+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
6869
def test_numpy_arrays_to_inlined_kerchunk_refs(
6970
netcdf4_file, inline_threshold, vars_to_inline, hdf_backend
7071
):
@@ -92,7 +93,7 @@ def test_numpy_arrays_to_inlined_kerchunk_refs(
9293
@requires_kerchunk
9394
@pytest.mark.parametrize("format", ["dict", "json", "parquet"])
9495
class TestKerchunkRoundtrip:
95-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
96+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
9697
def test_kerchunk_roundtrip_no_concat(self, tmpdir, format, hdf_backend):
9798
# set up example xarray dataset
9899
ds = xr.tutorial.open_dataset("air_temperature", decode_times=False)
@@ -125,7 +126,7 @@ def test_kerchunk_roundtrip_no_concat(self, tmpdir, format, hdf_backend):
125126
for coord in ds.coords:
126127
assert ds.coords[coord].attrs == roundtrip.coords[coord].attrs
127128

128-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
129+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
129130
@pytest.mark.parametrize("decode_times,time_vars", [(False, []), (True, ["time"])])
130131
def test_kerchunk_roundtrip_concat(
131132
self, tmpdir, format, hdf_backend, decode_times, time_vars
@@ -195,7 +196,7 @@ def test_kerchunk_roundtrip_concat(
195196
assert roundtrip.time.encoding["units"] == ds.time.encoding["units"]
196197
assert roundtrip.time.encoding["calendar"] == ds.time.encoding["calendar"]
197198

198-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
199+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
199200
def test_non_dimension_coordinates(self, tmpdir, format, hdf_backend):
200201
# regression test for GH issue #105
201202

@@ -282,7 +283,7 @@ def test_datetime64_dtype_fill_value(self, tmpdir, format):
282283

283284

284285
@requires_kerchunk
285-
@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
286+
@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
286287
def test_open_scalar_variable(tmpdir, hdf_backend):
287288
# regression test for GH issue #100
288289

0 commit comments

Comments (0)