Skip to content

Commit a1c0617

Browse files
maint: bump minimum dependency versions according to policy (#11231)
* maint: bump minimum versions according to policy
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* whats-new
* fixup
* fixups for minimal envs
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* fixups
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* revert numpy bump
* update whats new
---------
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 6c36d81 commit a1c0617

File tree

7 files changed

+110
-35
lines changed

7 files changed

+110
-35
lines changed

ci/policy.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ policy:
2626
- hypothesis
2727
- pytz
2828
- pytest-reportlog
29+
- pyarrow # transitive dependency of dask.dataframe, not an xarray dependency
2930
# these packages don't fail the CI, but will be printed in the report
3031
ignored_violations:
3132
- array-api-strict

doc/whats-new.rst

Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,71 @@ New Features
1717

1818
Breaking Changes
1919
~~~~~~~~~~~~~~~~
20+
- The minimum versions of some dependencies were changed (see table below).
21+
Notably, the minimum ``zarr`` version is now 3.0. Zarr v2 format data is
22+
still readable via ``zarr-python`` 3's built-in compatibility layer; however,
23+
``zarr-python`` 2 is no longer a supported dependency.
24+
By `Joe Hamman <https://github.com/jhamman>`_.
25+
26+
.. list-table::
27+
:header-rows: 1
28+
:widths: 30 20 20
29+
30+
* - Dependency
31+
- Old Version
32+
- New Version
33+
* - boto3
34+
- 1.34
35+
- 1.37
36+
* - cartopy
37+
- 0.23
38+
- 0.24
39+
* - dask-core
40+
- 2024.6
41+
- 2025.2
42+
* - distributed
43+
- 2024.6
44+
- 2025.2
45+
* - flox
46+
- 0.9
47+
- 0.10
48+
* - h5netcdf
49+
- 1.4
50+
- 1.5
51+
* - h5py
52+
- 3.11
53+
- 3.13
54+
* - iris
55+
- 3.9
56+
- 3.11
57+
* - lxml
58+
- 5.1
59+
- 5.3
60+
* - matplotlib-base
61+
- 3.8
62+
- 3.10
63+
* - numba
64+
- 0.60
65+
- 0.61
66+
* - numbagg
67+
- 0.8
68+
- 0.9
69+
* - packaging
70+
- 24.1
71+
- 24.2
72+
* - rasterio
73+
- 1.3
74+
- 1.4
75+
* - scipy
76+
- 1.13
77+
- 1.15
78+
* - toolz
79+
- 0.12
80+
- 1.0
81+
* - zarr
82+
- 2.18
83+
- 3.0
84+
2085
- Xarray will no longer by default decode a variable into a
2186
:py:class:`np.timedelta64` dtype based on the presence of a timedelta-like
2287
``"units"`` attribute alone. Instead it will rely on the presence of a

pixi.toml

Lines changed: 19 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ python = "*"
2020
numpy = "*"
2121
pandas = "*"
2222

23-
packaging = "24.1.*"
23+
packaging = "24.2.*"
2424
git = "*" # needed for dynamic versioning
2525

2626
[dependencies]
@@ -107,40 +107,41 @@ numpy = "1.26.*"
107107
pandas = "2.2.*"
108108

109109
[feature.minimum-scipy.dependencies]
110-
scipy = "1.13.*"
110+
scipy = "1.15.*"
111111

112112
[feature.min-versions.dependencies]
113113
array-api-strict = "2.4.*" # dependency for testing the array api compat
114-
boto3 = "1.34.*"
114+
boto3 = "1.37.*"
115115
bottleneck = "1.4.*"
116-
cartopy = "0.23.*"
116+
cartopy = "0.24.*"
117117
cftime = "1.6.*"
118-
dask-core = "2024.6.*"
119-
distributed = "2024.6.*"
120-
flox = "0.9.*"
121-
h5netcdf = "1.4.*"
118+
dask-core = "2025.2.*"
119+
distributed = "2025.2.*"
120+
flox = "0.10.*"
121+
h5netcdf = "1.5.*"
122122
# h5py and hdf5 tend to cause conflicts
123123
# for e.g. hdf5 1.12 conflicts with h5py=3.1
124124
# prioritize bumping other packages instead
125-
h5py = "3.11.*"
125+
h5py = "3.13.*"
126126
hdf5 = "1.14.*"
127-
iris = "3.9.*"
128-
lxml = "5.1.*" # Optional dep of pydap
129-
matplotlib-base = "3.8.*"
127+
iris = "3.11.*"
128+
lxml = "5.3.*" # Optional dep of pydap
129+
matplotlib-base = "3.10.*"
130130
nc-time-axis = "1.4.*"
131131
# netcdf follows a 1.major.minor[.patch] convention
132132
# (see https://github.com/Unidata/netcdf4-python/issues/1090)
133133
netcdf4 = "1.6.*"
134-
numba = "0.60.*"
135-
numbagg = "0.8.*"
136-
packaging = "24.1.*"
134+
numba = "0.61.*"
135+
numbagg = "0.9.*"
136+
packaging = "24.2.*"
137137
pint = "0.24.*"
138138
pydap = "3.5.*"
139-
rasterio = "1.3.*"
139+
rasterio = "1.4.*"
140140
seaborn = "0.13.*"
141141
sparse = "0.15.*"
142-
toolz = "0.12.*"
143-
zarr = "2.18.*"
142+
toolz = "1.0.*"
143+
zarr = "3.0.*"
144+
pyarrow = "*" # required by dask.dataframe
144145

145146
# TODO: Remove `target.unix` restriction once pandas nightly has win-64 wheels again.
146147
# Without this, `pixi lock` fails because it can't solve the nightly feature for win-64,

pyproject.toml

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ name = "xarray"
1818
readme = "README.md"
1919
requires-python = ">=3.11"
2020

21-
dependencies = ["numpy>=1.26", "packaging>=24.1", "pandas>=2.2"]
21+
dependencies = ["numpy>=1.26", "packaging>=24.2", "pandas>=2.2"]
2222

2323
# We don't encode minimum requirements here (though if we can write a script to
2424
# generate the text from `min_deps_check.py`, that's welcome...). We do add
@@ -27,27 +27,27 @@ dependencies = ["numpy>=1.26", "packaging>=24.1", "pandas>=2.2"]
2727

2828
[project.optional-dependencies]
2929
accel = [
30-
"scipy>=1.13",
30+
"scipy>=1.15",
3131
"bottleneck",
32-
"numbagg>=0.8",
32+
"numbagg>=0.9",
3333
"numba>=0.62", # numba 0.62 added support for numpy 2.3
34-
"flox>=0.9",
34+
"flox>=0.10",
3535
"opt_einsum",
3636
]
3737
complete = ["xarray[accel,etc,io,parallel,viz]"]
3838
io = [
3939
"netCDF4>=1.6.0",
40-
"h5netcdf[h5py]>=1.4.0",
40+
"h5netcdf[h5py]>=1.5.0",
4141
"pydap",
42-
"scipy>=1.13",
43-
"zarr>=2.18",
42+
"scipy>=1.15",
43+
"zarr>=3.0",
4444
"fsspec",
4545
"cftime",
4646
"pooch",
4747
]
4848
etc = ["sparse>=0.15"]
4949
parallel = ["dask[complete]"]
50-
viz = ["cartopy>=0.23", "matplotlib>=3.8", "nc-time-axis", "seaborn"]
50+
viz = ["cartopy>=0.24", "matplotlib>=3.10", "nc-time-axis", "seaborn"]
5151
types = [
5252
"pandas-stubs",
5353
"scipy-stubs",

xarray/testing/assertions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ def _data_allclose_or_equiv(arr1, arr2, rtol=1e-05, atol=1e-08, decode_bytes=Tru
4545
if any(arr.dtype.kind == "S" for arr in [arr1, arr2]) and decode_bytes:
4646
arr1 = _decode_string_data(arr1)
4747
arr2 = _decode_string_data(arr2)
48-
exact_dtypes = ["M", "m", "O", "S", "U"]
48+
exact_dtypes = ["M", "m", "O", "S", "U", "T"]
4949
if any(arr.dtype.kind in exact_dtypes for arr in [arr1, arr2]):
5050
return duck_array_ops.array_equiv(arr1, arr2)
5151
else:

xarray/tests/test_backends.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -524,7 +524,7 @@ def check_dtypes_roundtripped(self, expected, actual):
524524

525525
actual_dtype = actual.variables[k].dtype
526526
# TODO: check expected behavior for string dtypes more carefully
527-
string_kinds = {"O", "S", "U"}
527+
string_kinds = {"O", "S", "U", "T"}
528528
assert expected_dtype == actual_dtype or (
529529
expected_dtype.kind in string_kinds
530530
and actual_dtype.kind in string_kinds

xarray/tests/test_backends_datatree.py

Lines changed: 15 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -704,20 +704,28 @@ def test_zarr_encoding(self, tmpdir, simple_datatree, zarr_format) -> None:
704704
codec = Blosc(cname="zstd", clevel=3, shuffle=2)
705705
comp = {"compressors": (codec,)} if has_zarr_v3 else {"compressor": codec}
706706
elif zarr_format == 3:
707-
# specifying codecs in zarr_format=3 requires importing from zarr 3 namespace
708-
from zarr.registry import get_codec_class
707+
import zarr
709708

710-
Blosc = get_codec_class("numcodecs.blosc")
711-
comp = {"compressors": (Blosc(cname="zstd", clevel=3),)} # type: ignore[call-arg]
709+
comp = {
710+
"compressors": (zarr.codecs.BloscCodec(cname="zstd", clevel=3),),
711+
}
712712

713713
enc = {"/set2": dict.fromkeys(original_dt["/set2"].dataset.data_vars, comp)}
714714
original_dt.to_zarr(filepath, encoding=enc, zarr_format=zarr_format)
715715

716716
with open_datatree(filepath, engine="zarr") as roundtrip_dt:
717717
compressor_key = "compressors" if has_zarr_v3 else "compressor"
718-
assert (
719-
roundtrip_dt["/set2/a"].encoding[compressor_key] == comp[compressor_key]
720-
)
718+
if zarr_format == 3:
719+
# zarr v3 BloscCodec auto-tunes typesize and shuffle on write,
720+
# so we only check the attributes we explicitly set
721+
rt_codec = roundtrip_dt["/set2/a"].encoding[compressor_key][0]
722+
assert rt_codec.cname.value == "zstd"
723+
assert rt_codec.clevel == 3
724+
else:
725+
assert (
726+
roundtrip_dt["/set2/a"].encoding[compressor_key]
727+
== comp[compressor_key]
728+
)
721729

722730
enc["/not/a/group"] = {"foo": "bar"} # type: ignore[dict-item]
723731
with pytest.raises(ValueError, match=r"unexpected encoding group.*"):

0 commit comments

Comments (0)