Skip to content

Commit 5d1cd3f

Browse files
Merge branch 'main' into push-olvnoqnunqsz
2 parents 92b161c + 8dee574 commit 5d1cd3f

7 files changed

Lines changed: 30 additions & 29 deletions

File tree

docs/user_guide/v4-migration.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat
3939
- `ParticleFile` output is now in Parquet format
4040
- `ParticleFile` writing behaviour now errors out if there's existing output (this is being further discussed in https://github.com/Parcels-code/Parcels/issues/2593 )
4141
- A utility to read in ParticleFile output is now available. `parcels.read_particlefile()`
42+
- "trajectory" is now called "particle_id" in the particle file output
4243

4344
## Field
4445

src/parcels/_core/particle.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def get_default_particle(spatial_dtype: type[np.float32] | type[np.float64]) ->
155155
}, # "units" and "calendar" gets updated/set if working with cftime time domain
156156
),
157157
Variable(
158-
"trajectory",
158+
"particle_id",
159159
dtype=np.int64,
160160
to_write="once",
161161
attrs={

src/parcels/_core/particlefile.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ def _to_write_particles(particle_data, time):
177177
& np.equal(time, particle_data["time"], where=np.isfinite(particle_data["time"]), out=None)
178178
) # or dt is NaN and time matches particle_data["time"]
179179
)
180-
& (np.isfinite(particle_data["trajectory"]))
180+
& (np.isfinite(particle_data["particle_id"]))
181181
& (np.isfinite(particle_data["time"]))
182182
)[0]
183183

src/parcels/_core/particleset.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -49,8 +49,8 @@ class ParticleSet:
4949
Optional list of initial time values for particles. Default is fieldset.U.grid.time[0]
5050
repeatdt : datetime.timedelta or float, optional
5151
Optional interval on which to repeat the release of the ParticleSet. Either timedelta object, or float in seconds.
52-
trajectory_ids :
53-
Optional list of "trajectory" values (integers) for the particle IDs
52+
particle_ids :
53+
Optional list of "particle_id" values (integers) for the particle IDs
5454
5555
Other Variables can be initialised using further arguments (e.g. v=... for a Variable named 'v')
5656
"""
@@ -63,7 +63,7 @@ def __init__(
6363
lat=None,
6464
z=None,
6565
time=None,
66-
trajectory_ids=None,
66+
particle_ids=None,
6767
**kwargs,
6868
):
6969
self._data = None
@@ -74,8 +74,8 @@ def __init__(
7474
lat = np.empty(shape=0) if lat is None else np.array(lat).flatten()
7575
time = np.empty(shape=0) if time is None else np.array(time).flatten()
7676

77-
if trajectory_ids is None:
78-
trajectory_ids = np.arange(lon.size)
77+
if particle_ids is None:
78+
particle_ids = np.arange(lon.size)
7979

8080
if z is None:
8181
minz = 0
@@ -116,7 +116,7 @@ def __init__(
116116
lat=lat,
117117
z=z,
118118
time=time,
119-
trajectory=trajectory_ids,
119+
particle_id=particle_ids,
120120
),
121121
)
122122
self._ptype = pclass
@@ -177,7 +177,7 @@ def __repr__(self):
177177
return particleset_repr(self)
178178

179179
def __len__(self):
180-
return len(self._data["trajectory"])
180+
return len(self._data["particle_id"])
181181

182182
def add(self, particles):
183183
"""Add particles to the ParticleSet. Note that this is an
@@ -208,11 +208,11 @@ def add(self, particles):
208208
return
209209

210210
if isinstance(particles, type(self)):
211-
if len(self._data["trajectory"]) > 0:
212-
offset = self._data["trajectory"].max() + 1
211+
if len(self._data["particle_id"]) > 0:
212+
offset = self._data["particle_id"].max() + 1
213213
else:
214214
offset = 0
215-
particles._data["trajectory"] = particles._data["trajectory"] + offset
215+
particles._data["particle_id"] = particles._data["particle_id"] + offset
216216

217217
for d in self._data:
218218
self._data[d] = np.concatenate((self._data[d], particles._data[d]))

tests/test_particlefile.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ def test_pfile_array_remove_all_particles(fieldset, tmp_parquet):
111111
pfile.close()
112112

113113
df = pd.read_parquet(tmp_parquet)
114-
assert df["trajectory"].nunique() == npart
114+
assert df["particle_id"].nunique() == npart
115115

116116

117117
def test_write_dtypes_pfile(fieldset, tmp_parquet):
@@ -219,9 +219,9 @@ def test_write_timebackward(fieldset, tmp_parquet):
219219

220220
df = pd.read_parquet(tmp_parquet)
221221

222-
assert df["trajectory"].dtype == "int64"
222+
assert df["particle_id"].dtype == "int64"
223223
assert bool(
224-
df.groupby("trajectory")
224+
df.groupby("particle_id")
225225
.apply(
226226
lambda x: (np.diff(x["time"]) < 0).all() # for each particle - set True if it has decreasing time
227227
)
@@ -302,8 +302,8 @@ def IncreaseAge(particles, fieldset): # pragma: no cover
302302

303303
df = parcels.read_particlefile(tmp_parquet)
304304

305-
# Map sorted trajectory IDs to release times (0, 1, ..., npart-1 seconds)
306-
for index, df_traj in df.groupby("trajectory"):
305+
# Map sorted particle IDs to release times (0, 1, ..., npart-1 seconds)
306+
for index, df_traj in df.groupby("particle_id"):
307307
release_time = time[index]
308308
np.testing.assert_equal(df_traj["age"].astype("timedelta64[s]").values, (df_traj["time"] - release_time).values)
309309

@@ -367,7 +367,7 @@ def test_pset_execute_outputdt_forwards(fieldset):
367367
dt = timedelta(minutes=5)
368368

369369
df = setup_pset_execute(fieldset=fieldset, outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt))
370-
particle_0_times = df[df.trajectory == 0].time.values
370+
particle_0_times = df[df["particle_id"] == 0].time.values
371371

372372
np.testing.assert_equal(np.diff(particle_0_times), outputdt.seconds)
373373

@@ -390,7 +390,7 @@ def test_pset_execute_outputdt_backwards(fieldset):
390390
dt = -timedelta(minutes=5)
391391

392392
df = setup_pset_execute(fieldset=fieldset, outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt))
393-
particle_0_times = df[df.trajectory == 0].time.values
393+
particle_0_times = df[df["particle_id"] == 0].time.values
394394
np.testing.assert_equal(np.diff(particle_0_times), -outputdt.seconds)
395395

396396

@@ -409,7 +409,7 @@ def test_pset_execute_outputdt_backwards_fieldset_timevarying():
409409
fieldset = FieldSet.from_sgrid_conventions(ds_fset)
410410

411411
df = setup_pset_execute(outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt), fieldset=fieldset)
412-
particle_0_times = df[df.trajectory == 0].time.values
412+
particle_0_times = df[df["particle_id"] == 0].time.values
413413
np.testing.assert_equal(np.diff(particle_0_times), -outputdt.seconds)
414414

415415

tests/test_particleset.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,8 @@ def test_pset_with_pids(fieldset, offset, npart=100):
5252
lon = np.linspace(0, 1, npart)
5353
lat = np.linspace(1, 0, npart)
5454
trajectory_ids = np.arange(offset, npart + offset)
55-
pset = ParticleSet(fieldset, lon=lon, lat=lat, trajectory_ids=trajectory_ids)
56-
assert np.allclose([p.trajectory for p in pset], trajectory_ids, atol=1e-12)
55+
pset = ParticleSet(fieldset, lon=lon, lat=lat, particle_ids=trajectory_ids)
56+
assert np.allclose([p.particle_id for p in pset], trajectory_ids, atol=1e-12)
5757

5858

5959
@pytest.mark.parametrize("aslist", [True, False])
@@ -146,14 +146,14 @@ def test_pset_add_explicit(fieldset):
146146
assert len(pset) == npart
147147
assert np.allclose([p.lon for p in pset], lon, atol=1e-12)
148148
assert np.allclose([p.lat for p in pset], lat, atol=1e-12)
149-
assert np.allclose(np.diff(pset._data["trajectory"]), np.ones(pset._data["trajectory"].size - 1), atol=1e-12)
149+
assert np.allclose(np.diff(pset._data["particle_id"]), np.ones(pset._data["particle_id"].size - 1), atol=1e-12)
150150

151151

152152
def test_pset_add_implicit(fieldset):
153153
pset = ParticleSet(fieldset, lon=np.zeros(3), lat=np.ones(3), pclass=Particle)
154154
pset += ParticleSet(fieldset, lon=np.ones(4), lat=np.zeros(4), pclass=Particle)
155155
assert len(pset) == 7
156-
assert np.allclose(np.diff(pset._data["trajectory"]), np.ones(6), atol=1e-12)
156+
assert np.allclose(np.diff(pset._data["particle_id"]), np.ones(6), atol=1e-12)
157157

158158

159159
def test_pset_add_implicit_in_loop(fieldset, npart=10):
@@ -179,12 +179,12 @@ def test_pset_remove_index(fieldset, npart=100):
179179
indices_to_remove = [0, 10, 20]
180180
pset.remove_indices(indices_to_remove)
181181
assert pset.size == 97
182-
assert not np.any(np.isin(pset.trajectory, indices_to_remove))
182+
assert not np.any(np.isin(pset.particle_id, indices_to_remove))
183183

184184

185185
def test_pset_iterator(fieldset):
186186
npart = 10
187187
pset = ParticleSet(fieldset, lon=np.zeros(npart), lat=np.ones(npart))
188188
for i, particle in enumerate(pset):
189-
assert particle.trajectory == i
189+
assert particle.particle_id == i
190190
assert i == npart - 1

tests/test_particleset_execute.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -228,9 +228,9 @@ def DeleteKernel(particles, fieldset): # pragma: no cover
228228

229229
pset.execute(DeleteKernel, runtime=np.timedelta64(1, "s"), dt=np.timedelta64(1, "s"))
230230
indices = [i for i in range(npart) if not (40 <= i < 60)]
231-
assert [p.trajectory for p in pset] == indices
232-
assert pset[70].trajectory == 90
233-
assert pset[-1].trajectory == npart - 1
231+
assert [p.particle_id for p in pset] == indices
232+
assert pset[70].particle_id == 90
233+
assert pset[-1].particle_id == npart - 1
234234
assert pset.size == 80
235235

236236

0 commit comments

Comments
 (0)