Skip to content

Commit 11534b0

Browse files
committed
restore old comments / docstrings
1 parent fbde3af commit 11534b0

File tree

1 file changed

+28
-2
lines changed

1 file changed

+28
-2
lines changed

src/zarr/core/codec_pipeline.py

Lines changed: 28 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -249,12 +249,32 @@ def from_codecs(cls, codecs: Iterable[Codec]) -> Self:
249249

250250
@property
251251
def supports_partial_decode(self) -> bool:
252+
"""Determines whether the codec pipeline supports partial decoding.
253+
254+
Currently, only codec pipelines with a single ArrayBytesCodec that supports
255+
partial decoding can support partial decoding. This limitation is due to the fact
256+
that ArrayArrayCodecs can change the slice selection leading to non-contiguous
257+
slices and BytesBytesCodecs can change the chunk bytes in a way that slice
258+
selections cannot be attributed to byte ranges anymore which renders partial
259+
decoding infeasible.
260+
261+
This limitation may be softened in the future."""
252262
return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance(
253263
self.array_bytes_codec, ArrayBytesCodecPartialDecodeMixin
254264
)
255265

256266
@property
257267
def supports_partial_encode(self) -> bool:
268+
"""Determines whether the codec pipeline supports partial encoding.
269+
270+
Currently, only codec pipelines with a single ArrayBytesCodec that supports
271+
partial encoding can support partial encoding. This limitation is due to the fact
272+
that ArrayArrayCodecs can change the slice selection leading to non-contiguous
273+
slices and BytesBytesCodecs can change the chunk bytes in a way that slice
274+
selections cannot be attributed to byte ranges anymore which renders partial
275+
encoding infeasible.
276+
277+
This limitation may be softened in the future."""
258278
return (len(self.array_array_codecs) + len(self.bytes_bytes_codecs)) == 0 and isinstance(
259279
self.array_bytes_codec, ArrayBytesCodecPartialEncodeMixin
260280
)
@@ -637,6 +657,7 @@ def _merge_chunk_array(
637657
if (
638658
is_complete_chunk
639659
and value.shape == chunk_spec.shape
660+
# Guard that this is not a partial chunk at the end with is_complete_chunk=True
640661
and value[out_selection].shape == chunk_spec.shape
641662
):
642663
return value
@@ -648,16 +669,20 @@ def _merge_chunk_array(
648669
fill_value=fill_value_or_default(chunk_spec),
649670
)
650671
else:
651-
chunk_array = existing_chunk_array.copy()
672+
chunk_array = existing_chunk_array.copy() # make a writable copy
652673
if chunk_selection == () or is_scalar(
653674
value.as_ndarray_like(), chunk_spec.dtype.to_native_dtype()
654675
):
655676
chunk_value = value
656677
else:
657678
chunk_value = value[out_selection]
679+
# handle missing singleton dimensions
658680
if drop_axes != ():
659681
item = tuple(
660-
None if idx in drop_axes else slice(None) for idx in range(chunk_spec.ndim)
682+
None # equivalent to np.newaxis
683+
if idx in drop_axes
684+
else slice(None)
685+
for idx in range(chunk_spec.ndim)
661686
)
662687
chunk_value = chunk_value[item]
663688
chunk_array[chunk_selection] = chunk_value
@@ -672,6 +697,7 @@ async def write_batch(
672697
batch_info = list(batch_info)
673698

674699
if self.supports_partial_encode:
700+
# Pass scalar values as is
675701
if len(value.shape) == 0:
676702
await self.encode_partial_batch(
677703
[

0 commit comments

Comments
 (0)