 from virtualizarr import open_virtual_dataset
 from virtualizarr.backend import FileType, automatically_determine_filetype
 from virtualizarr.manifests import ManifestArray
+from virtualizarr.readers import HDF5VirtualBackend
 from virtualizarr.readers.hdf import HDFVirtualBackend
 from virtualizarr.tests import (
     has_astropy,
@@ -83,7 +84,7 @@ def test_FileType():


 @requires_kerchunk
-@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
 class TestOpenVirtualDatasetIndexes:
     def test_no_indexes(self, netcdf4_file, hdf_backend):
         vds = open_virtual_dataset(netcdf4_file, indexes={}, backend=hdf_backend)
@@ -115,7 +116,7 @@ def index_mappings_equal(indexes1: Mapping[str, Index], indexes2: Mapping[str, I


 @requires_kerchunk
-@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
 def test_cftime_index(tmpdir, hdf_backend):
     """Ensure a virtual dataset contains the same indexes as an Xarray dataset"""
     # Note: Test was created to debug: https://github.com/zarr-developers/VirtualiZarr/issues/168
@@ -145,7 +146,7 @@ def test_cftime_index(tmpdir, hdf_backend):


 @requires_kerchunk
-@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
 class TestOpenVirtualDatasetAttrs:
     def test_drop_array_dimensions(self, netcdf4_file, hdf_backend):
         # regression test for GH issue #150
@@ -166,20 +167,16 @@ def test_coordinate_variable_attrs_preserved(self, netcdf4_file, hdf_backend):
 @network
 @requires_s3fs
 class TestReadFromS3:
-    @pytest.mark.parametrize(
-        "filetype", ["netcdf4", None], ids=["netcdf4 filetype", "None filetype"]
-    )
     @pytest.mark.parametrize(
         "indexes", [None, {}], ids=["None index", "empty dict index"]
     )
-    @pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
-    def test_anon_read_s3(self, filetype, indexes, hdf_backend):
+    @pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
+    def test_anon_read_s3(self, indexes, hdf_backend):
         """Parameterized tests for empty vs supplied indexes and filetypes."""
         # TODO: Switch away from this s3 url after minIO is implemented.
         fpath = "s3://carbonplan-share/virtualizarr/local.nc"
         vds = open_virtual_dataset(
             fpath,
-            filetype=filetype,
             indexes=indexes,
             reader_options={"storage_options": {"anon": True}},
             backend=hdf_backend,
@@ -191,7 +188,7 @@ def test_anon_read_s3(self, filetype, indexes, hdf_backend):


 @network
-@pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+@pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
 class TestReadFromURL:
     @pytest.mark.parametrize(
         "filetype, url",
@@ -295,7 +292,7 @@ def test_virtualizarr_vs_local_nisar(self, hdf_backend):

 @requires_kerchunk
 class TestLoadVirtualDataset:
-    @pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+    @pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
     def test_loadable_variables(self, netcdf4_file, hdf_backend):
         vars_to_load = ["air", "time"]
         vds = open_virtual_dataset(
@@ -330,7 +327,7 @@ def test_explicit_filetype_and_backend(self, netcdf4_file):
             netcdf4_file, filetype="hdf", backend=HDFVirtualBackend
         )

-    @pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+    @pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
     def test_group_kwarg(self, hdf5_groups_file, hdf_backend):
         if hdf_backend:
             with pytest.raises(NotImplementedError, match="Nested groups"):
@@ -376,13 +373,13 @@ def test_open_virtual_dataset_passes_expected_args(
         }
         mock_read_kerchunk.assert_called_once_with(**args)

-    @pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+    @pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
     def test_open_dataset_with_empty(self, hdf5_empty, tmpdir, hdf_backend):
         vds = open_virtual_dataset(hdf5_empty, backend=hdf_backend)
         assert vds.empty.dims == ()
         assert vds.empty.attrs == {"empty": "true"}

-    @pytest.mark.parametrize("hdf_backend", [None, HDFVirtualBackend])
+    @pytest.mark.parametrize("hdf_backend", [HDF5VirtualBackend, HDFVirtualBackend])
     def test_open_dataset_with_scalar(self, hdf5_scalar, tmpdir, hdf_backend):
         vds = open_virtual_dataset(hdf5_scalar, backend=hdf_backend)
         assert vds.scalar.dims == ()
0 commit comments