Skip to content

Commit dc8d3e1

Browse files
authored
Use standard library instead of toolz for map, reduce, concat, accumulate (#890)
1 parent e55aa48 commit dc8d3e1

9 files changed

Lines changed: 24 additions & 27 deletions

File tree

cubed/array_api/manipulation_functions.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,9 @@
11
from bisect import bisect
2+
from functools import reduce
3+
from itertools import accumulate, chain
24
from operator import add, mul
35
from typing import Iterator
46

5-
import tlz
6-
from toolz import reduce
7-
87
from cubed.array_api.creation_functions import empty
98
from cubed.backend_array_api import IS_IMMUTABLE_ARRAY
109
from cubed.backend_array_api import namespace as nxp
@@ -39,7 +38,7 @@ def broadcast_arrays(*arrays):
3938

4039
# Unify uneven chunking
4140
inds = [list(reversed(range(x.ndim))) for x in arrays]
42-
uc_args = tlz.concat(zip(arrays, inds))
41+
uc_args = chain.from_iterable(zip(arrays, inds))
4342
_, args = unify_chunks(*uc_args, warn=False)
4443

4544
shape = broadcast_shapes(*(e.shape for e in args))
@@ -133,11 +132,11 @@ def concat(arrays, /, *, axis=0, chunks=None):
133132
inds = [list(range(x.ndim)) for x in arrays]
134133
for i, ind in enumerate(inds):
135134
ind[axis] = -(i + 1)
136-
uc_args = tlz.concat(zip(arrays, inds))
135+
uc_args = chain.from_iterable(zip(arrays, inds))
137136
chunkss, arrays = unify_chunks(*uc_args, warn=False)
138137

139138
# offsets along axis for the start of each array
140-
offsets = [0] + list(tlz.accumulate(add, [a.shape[axis] for a in arrays]))
139+
offsets = [0] + list(accumulate([a.shape[axis] for a in arrays], add))
141140
in_shapes = tuple(array.shape for array in arrays)
142141

143142
axis = validate_axis(axis, ndim)

cubed/core/array.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
1+
from functools import reduce
12
from operator import mul
23
from typing import Literal, Optional, TypeVar
34

4-
from toolz import map, reduce
5-
65
from cubed import config
76
from cubed.backend_array_api import namespace as nxp
87
from cubed.backend_array_api import numpy_array_to_backend_array

cubed/core/gufunc.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
1+
from itertools import chain
2+
13
import numpy as np
2-
from tlz import concat, merge, unique
4+
from tlz import merge, unique
35

46
from cubed.vendor.dask.array.gufunc import _parse_gufunc_signature
57

@@ -151,7 +153,7 @@ def apply_gufunc(
151153
raise ValueError(f"Dimension `'{dim}'` with different chunksize present")
152154

153155
# Apply function - use blockwise here
154-
arginds = list(concat(zip(args, input_dimss)))
156+
arginds = list(chain.from_iterable(zip(args, input_dimss)))
155157

156158
from cubed.core.ops import blockwise
157159

cubed/core/indexing.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
import math
2+
from itertools import accumulate
23
from operator import add
34
from typing import TYPE_CHECKING
45

56
import ndindex
67
import numpy as np
7-
from toolz import accumulate, map
88

99
from cubed.backend_array_api import backend_array_to_numpy_array
1010
from cubed.core.array import CoreArray
@@ -181,7 +181,9 @@ def _target_chunk_selection(target_chunks, idx, selection):
181181
offset = s.start or 0
182182
step = s.step if s.step is not None else 1
183183
start = tuple(
184-
accumulate(add, tuple(x * step for x in target_chunks[i]), offset)
184+
accumulate(
185+
tuple(x * step for x in target_chunks[i]), add, initial=offset
186+
)
185187
)
186188
j = idx[i]
187189
sel.append(slice(start[j], start[j + 1], step))
@@ -190,7 +192,7 @@ def _target_chunk_selection(target_chunks, idx, selection):
190192
elif isinstance(s, np.ndarray):
191193
# find the cumulative chunk starts
192194
target_chunk_starts = [0] + list(
193-
accumulate(add, [c for c in target_chunks[i]])
195+
accumulate([c for c in target_chunks[i]], add)
194196
)
195197
# and use to slice the integer array
196198
j = idx[i]

cubed/core/ops.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import numbers
44
from dataclasses import dataclass
55
from functools import partial
6-
from itertools import product
6+
from itertools import chain, product
77
from numbers import Integral, Number
88
from typing import (
99
TYPE_CHECKING,
@@ -20,8 +20,7 @@
2020

2121
import numpy as np
2222
import zarr
23-
from tlz import concat, first, partition
24-
from toolz import map
23+
from tlz import first, partition
2524

2625
from cubed import config
2726
from cubed.backend_array_api import IS_IMMUTABLE_ARRAY, numpy_array_to_backend_array
@@ -619,7 +618,7 @@ def elemwise(func, *args: "Array", dtype=None) -> "Array":
619618
return blockwise(
620619
func,
621620
expr_inds,
622-
*concat((a, tuple(range(a.ndim)[::-1])) for a in args),
621+
*chain.from_iterable((a, tuple(range(a.ndim)[::-1])) for a in args),
623622
dtype=dtype,
624623
)
625624

@@ -887,7 +886,7 @@ def _map_blocks(
887886
return blockwise(
888887
func,
889888
out_ind,
890-
*concat(argpairs),
889+
*chain.from_iterable(argpairs),
891890
dtype=dtype,
892891
adjust_chunks=adjust_chunks,
893892
new_axes=new_axes,

cubed/diagnostics/tqdm.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
import contextlib
22
import sys
33

4-
from toolz import map
5-
64
from cubed.runtime.pipeline import visit_nodes
75
from cubed.runtime.types import Callback
86

cubed/primitive/blockwise.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@
2424

2525
import toolz
2626
import zarr
27-
from toolz import map
2827

2928
from cubed.backend_array_api import (
3029
backend_array_to_numpy_array,

cubed/storage/stores/tensorstore.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
import dataclasses
22
import math
3+
from functools import reduce
34
from itertools import starmap
45
from operator import mul
56
from typing import Any, Dict, Optional, Union
67

78
import numpy as np
89
import tensorstore
9-
from toolz import reduce
1010

1111
from cubed.types import T_DType, T_RegularChunks, T_Shape, T_Store
1212
from cubed.utils import itemsize, join_path

cubed/utils.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,8 @@
77
import traceback
88
from collections.abc import Iterable
99
from dataclasses import dataclass
10-
from functools import partial
10+
from functools import partial, reduce
11+
from itertools import accumulate
1112
from math import prod
1213
from operator import add, mul
1314
from pathlib import Path
@@ -17,8 +18,6 @@
1718
from urllib.parse import quote, unquote, urlsplit, urlunsplit
1819

1920
import numpy as np
20-
import tlz as toolz
21-
from toolz import reduce
2221

2322
from cubed.backend_array_api import backend_dtype_to_numpy_dtype
2423
from cubed.backend_array_api import namespace as nxp
@@ -76,9 +75,9 @@ def get_item(chunks: T_RectangularChunks, idx: Tuple[int, ...]) -> Tuple[slice,
7675

7776
def _cumsum(seq, initial_zero=False):
7877
if initial_zero:
79-
return tuple(toolz.accumulate(add, seq, 0))
78+
return tuple(accumulate(seq, add, initial=0))
8079
else:
81-
return tuple(toolz.accumulate(add, seq))
80+
return tuple(accumulate(seq, add))
8281

8382

8483
def join_path(dir_url: PathType, child_path: str) -> str:

0 commit comments

Comments (0)