22
33import os
44from concurrent .futures import ThreadPoolExecutor
5- from dataclasses import dataclass
5+ from dataclasses import dataclass , field
66from itertools import pairwise
77from typing import TYPE_CHECKING , Any , TypeVar , cast
88from warnings import warn
def _get_pool(max_workers: int) -> ThreadPoolExecutor:
    """Get a thread pool with at most *max_workers* threads.

    Lazily creates the module-level pool. If the current pool is smaller
    than requested, it is replaced with a larger one and the previous pool
    is shut down without blocking on its in-flight tasks.
    """
    global _pool
    # NOTE(review): ``_max_workers`` is a private ThreadPoolExecutor
    # attribute — confirm it remains available on the supported Python versions.
    if _pool is None or _pool._max_workers < max_workers:
        previous = _pool
        _pool = ThreadPoolExecutor(max_workers=max_workers)
        if previous is not None:
            # wait=False: return immediately; the retired pool's workers
            # drain whatever was already submitted to them.
            previous.shutdown(wait=False)
    return _pool
188191
@@ -214,6 +217,8 @@ class BatchedCodecPipeline(CodecPipeline):
    bytes_bytes_codecs: tuple[BytesBytesCodec, ...]
    # Deprecated: kept only for backward compatibility; __post_init__
    # emits a FutureWarning whenever a non-None value is supplied.
    batch_size: int | None = None

    # Cached flag set once in __post_init__ (via object.__setattr__, since
    # the dataclass is frozen): True when every codec in the chain
    # implements SupportsSyncCodec. Excluded from init/repr/compare so it
    # never participates in dataclass identity.
    _all_sync: bool = field(default=False, init=False, repr=False, compare=False)
217222 def __post_init__ (self ) -> None :
218223 if self .batch_size is not None :
219224 warn (
@@ -222,11 +227,12 @@ def __post_init__(self) -> None:
222227 FutureWarning ,
223228 stacklevel = 2 ,
224229 )
225-
226- @property
227- def _all_sync (self ) -> bool :
228- """True when every codec in the chain implements SupportsSyncCodec."""
229- return all (isinstance (c , SupportsSyncCodec ) for c in self )
230+ # Compute once; frozen dataclass requires object.__setattr__.
231+ object .__setattr__ (
232+ self ,
233+ "_all_sync" ,
234+ all (isinstance (c , SupportsSyncCodec ) for c in self ),
235+ )
230236
231237 def evolve_from_array_spec (self , array_spec : ArraySpec ) -> Self :
232238 return type (self ).from_codecs (c .evolve_from_array_spec (array_spec = array_spec ) for c in self )
@@ -710,7 +716,7 @@ async def _write_chunk(
710716 )
711717
712718 # 3) Write result
713- if chunk_bytes is _DELETED or chunk_bytes is None :
719+ if chunk_bytes is _DELETED :
714720 await byte_setter .delete ()
715721 else :
716722 await byte_setter .set (chunk_bytes ) # type: ignore[arg-type]
@@ -1020,22 +1026,21 @@ def write_sync(
10201026 for encoded , (byte_setter , * _ ) in zip (encoded_list , batch_info_list , strict = False ):
10211027 if encoded is _DELETED :
10221028 byte_setter .delete_sync ()
1023- elif encoded is not None :
1024- byte_setter .set_sync (encoded )
10251029 else :
1026- byte_setter .delete_sync ( )
1030+ byte_setter .set_sync ( encoded )
10271031
10281032
10291033def codecs_from_list (
10301034 codecs : Iterable [Codec ],
10311035) -> tuple [tuple [ArrayArrayCodec , ...], ArrayBytesCodec , tuple [BytesBytesCodec , ...]]:
10321036 from zarr .codecs .sharding import ShardingCodec
10331037
1038+ codecs = list (codecs )
10341039 array_array : tuple [ArrayArrayCodec , ...] = ()
10351040 array_bytes_maybe : ArrayBytesCodec | None = None
10361041 bytes_bytes : tuple [BytesBytesCodec , ...] = ()
10371042
1038- if any (isinstance (codec , ShardingCodec ) for codec in codecs ) and len (tuple ( codecs ) ) > 1 :
1043+ if any (isinstance (codec , ShardingCodec ) for codec in codecs ) and len (codecs ) > 1 :
10391044 warn (
10401045 "Combining a `sharding_indexed` codec disables partial reads and "
10411046 "writes, which may lead to inefficient performance." ,
0 commit comments