Commit ed6079f

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent: 25857fb

3 files changed: 27 additions & 29 deletions
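
pre-commit.ci runs the repository's configured hooks and commits any resulting auto-fixes back to the branch, which is how this commit was produced. As a minimal sketch, a config that would drive this kind of formatting pass might look like the following (the hooks actually configured in this repository are an assumption, not shown in the commit):

    # .pre-commit-config.yaml -- illustrative only; actual hooks may differ
    repos:
      - repo: https://github.com/psf/black
        rev: 24.8.0
        hooks:
          - id: black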


virtualizarr/common.py

Lines changed: 6 additions & 10 deletions
@@ -60,8 +60,7 @@ def replace_virtual_with_loadable_vars(
         decode_times=decode_times,
     ) as loadable_ds:
         var_names_to_load = get_loadable_variables(
-            dataset=loadable_ds,
-            loadable_variables=loadable_variables
+            dataset=loadable_ds, loadable_variables=loadable_variables
         )
         # this will automatically keep any IndexVariables needed for loadable 1D coordinates
         loadable_var_names_to_drop = set(loadable_ds.variables).difference(
@@ -83,8 +82,9 @@ def replace_virtual_with_loadable_vars(
         ],
     )
 
-def get_loadable_variables(
-    dataset: xr.Dataset,
+
+def get_loadable_variables(
+    dataset: xr.Dataset,
     loadable_variables: Iterable[Hashable] | None = None,
 ) -> Iterable[Hashable]:
     var_names_to_load: list[Hashable]
@@ -97,18 +97,14 @@ def get_loadable_variables(
         # coordinate variables. We already have all the indexes and variables
         # we should be keeping - we just need to distinguish them.
         var_names_to_load = [
-            name
-            for name, var in dataset.variables.items()
-            if var.dims == (name,)
+            name for name, var in dataset.variables.items() if var.dims == (name,)
         ]
     else:
         raise ValueError(
             "loadable_variables must be an iterable of string variable names,"
             f" or None, but got type {type(loadable_variables)}"
         )
-    non_loadable_vars = set(dataset.variables).difference(
-        var_names_to_load
-    )
+    non_loadable_vars = set(dataset.variables).difference(var_names_to_load)
     return var_names_to_load
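Aside (not part of the commit): the var.dims == (name,) test above is the standard xarray idiom for selecting 1D dimension coordinates, i.e. variables whose single dimension carries their own name. A runnable sketch:

    import numpy as np
    import xarray as xr

    ds = xr.Dataset(
        {"temp": (("time", "x"), np.zeros((2, 3)))},
        coords={"time": [0, 1], "x": [10, 20, 30]},
    )

    # Only variables whose sole dimension shares their name qualify;
    # "temp" is excluded because its dims are ("time", "x").
    dimension_coords = [
        name for name, var in ds.variables.items() if var.dims == (name,)
    ]
    print(sorted(dimension_coords))  # ['time', 'x']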
virtualizarr/readers/hdf/hdf.py

Lines changed: 20 additions & 19 deletions
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import math
-from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     Hashable,
@@ -25,13 +24,11 @@
     ManifestGroup,
     ManifestStore,
 )
-from virtualizarr.manifests.manifest import validate_and_normalize_path_to_uri
 from virtualizarr.manifests.utils import create_v3_array_metadata
 from virtualizarr.readers.api import VirtualBackend
-from virtualizarr.readers.hdf.filters import cfcodec_from_dataset, codecs_from_dataset
+from virtualizarr.readers.hdf.filters import codecs_from_dataset
 from virtualizarr.types import ChunkKey
-from virtualizarr.utils import _FsspecFSFromFilepath, soft_import
-
+from virtualizarr.utils import soft_import
 
 h5py = soft_import("h5py", "For reading hdf files", strict=False)
 
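Aside: soft_import (used above to guard the h5py dependency) makes an import optional. The following is a hypothetical sketch of such a helper, assuming importlib; it is not virtualizarr's actual implementation:

    import importlib
    from types import ModuleType

    def soft_import(name: str, reason: str, strict: bool = True) -> ModuleType | None:
        """Import `name` if installed; otherwise raise (strict) or return None."""
        try:
            return importlib.import_module(name)
        except ImportError:
            if strict:
                raise ImportError(f"{name} is required. {reason}")
            return None  # callers must handle the missing module

With strict=False, as in the h5py call above, a missing dependency only fails later, at the point of use.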
@@ -85,21 +82,23 @@ def _construct_manifest_array(
         # Temporarily disable use CF->Codecs - TODO re-enable in subsequent PR.
         # cfcodec = cfcodec_from_dataset(dataset)
         # if cfcodec:
-        #     codecs.insert(0, cfcodec["codec"])
-        #     dtype = cfcodec["target_dtype"]
-        #     attrs.pop("scale_factor", None)
-        #     attrs.pop("add_offset", None)
+        #     codecs.insert(0, cfcodec["codec"])
+        #     dtype = cfcodec["target_dtype"]
+        #     attrs.pop("scale_factor", None)
+        #     attrs.pop("add_offset", None)
         # else:
-        #     dtype = dataset.dtype
+        #     dtype = dataset.dtype
 
         if "_FillValue" in attrs:
-            encoded_cf_fill_value = HDFVirtualBackend._encode_cf_fill_value(attrs["_FillValue"], dtype)
+            encoded_cf_fill_value = HDFVirtualBackend._encode_cf_fill_value(
+                attrs["_FillValue"], dtype
+            )
             attrs["_FillValue"] = encoded_cf_fill_value
 
         codec_configs = [
             numcodec_config_to_configurable(codec.get_config()) for codec in codecs
         ]
-
+
         fill_value = dataset.fillvalue.item()
         dims = tuple(HDFVirtualBackend._dataset_dims(dataset, group=group))
         metadata = create_v3_array_metadata(
@@ -111,7 +110,6 @@ def _construct_manifest_array(
             dimension_names=dims,
             attributes=attrs,
         )
-
 
         manifest = HDFVirtualBackend._dataset_chunk_manifest(path, dataset)
         return ManifestArray(metadata=metadata, chunkmanifest=manifest)
@@ -161,7 +159,7 @@ def _construct_manifest_group(
             if variable is not None:
                 manifest_dict[key] = variable
         return ManifestGroup(arrays=manifest_dict, attributes=attrs)
-
+
     @staticmethod
     def _create_manifest_store(
         filepath: str,
@@ -173,7 +171,10 @@ def _create_manifest_store(
     ) -> ManifestStore:
         # Create a group containing dataset level metadata and all the manifest arrays
         manifest_group = HDFVirtualBackend._construct_manifest_group(
-            store=store, filepath=filepath, group=group, drop_variables=drop_variables,
+            store=store,
+            filepath=filepath,
+            group=group,
+            drop_variables=drop_variables,
         )
         # Convert to a manifest store
         return ManifestStore(stores={prefix: store}, group=manifest_group)
@@ -197,13 +198,14 @@ def open_virtual_dataset(
         )
 
         # filepath = validate_and_normalize_path_to_uri(
-        #     filepath, fs_root=Path.cwd().as_uri()
+        #     filepath, fs_root=Path.cwd().as_uri()
         # )
 
         _drop_vars: list[Hashable] = (
             [] if drop_variables is None else list(drop_variables)
         )
         from obstore.store import LocalStore
+
         manifest_store = HDFVirtualBackend._create_manifest_store(
             filepath=filepath,
             store=LocalStore(),
@@ -212,11 +214,10 @@ def open_virtual_dataset(
             group=group,
         )
         ds = manifest_store.to_virtual_dataset(
-            loadable_variables=loadable_variables,
-            decode_times=decode_times
+            loadable_variables=loadable_variables, decode_times=decode_times
         )
         return ds
-
+
     @staticmethod
     def _dataset_chunk_manifest(
         path: str,
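
Aside: a hypothetical end-to-end use of the backend touched above, going through virtualizarr's public open_virtual_dataset entry point (the exact signature may differ across versions, and the file name here is made up):

    from virtualizarr import open_virtual_dataset

    # Chunk references stay virtual; variables named in loadable_variables
    # are read eagerly (e.g. small 1D coordinates such as "time").
    vds = open_virtual_dataset(
        "observations.nc",
        loadable_variables=["time"],
    )
    print(vds)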

virtualizarr/tests/test_readers/test_hdf/test_hdf.py

Lines changed: 1 addition & 0 deletions
@@ -161,6 +161,7 @@ def test_multiple_attributes(self, string_attributes_hdf5_file):
         attrs = HDFVirtualBackend._extract_attrs(ds)
         assert len(attrs.keys()) == 2
 
+
 @requires_hdf5plugin
 @requires_imagecodecs
 class TestManifestGroupFromHDF:
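
The only change in this file is one added blank line: PEP 8 style (as enforced by formatters such as black) requires two blank lines before a top-level class definition, which is presumably the rule the hook applied here.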
