Skip to content

Commit 286558c

Browse files
Merge pull request #1672 from andrew-s28/logging-updates
Implement warnings in Parcels
2 parents 679d06d + 024aa35 commit 286558c

20 files changed

Lines changed: 396 additions & 197 deletions

docs/examples/documentation_indexing.ipynb

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@
5656
"source": [
5757
"from glob import glob\n",
5858
"from os import path\n",
59+
"import warnings\n",
5960
"\n",
6061
"import numpy as np\n",
6162
"\n",
@@ -179,17 +180,17 @@
179180
" \"W\": c_grid_dimensions,\n",
180181
"}\n",
181182
"\n",
182-
"fieldsetC = parcels.FieldSet.from_nemo(\n",
183-
" filenames, variables, dimensions, netcdf_decodewarning=False\n",
184-
")"
183+
"with warnings.catch_warnings():\n",
184+
" warnings.simplefilter(\"ignore\", parcels.FileWarning)\n",
185+
" fieldsetC = parcels.FieldSet.from_nemo(filenames, variables, dimensions)"
185186
]
186187
},
187188
{
188189
"attachments": {},
189190
"cell_type": "markdown",
190191
"metadata": {},
191192
"source": [
192-
"Note by the way, that we used `netcdf_decodewarning=False` in the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`.\n"
193+
"Note by the way, that we used `warnings.catch_warnings()` with `warnings.simplefilter(\"ignore\", parcels.FileWarning)` to wrap the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`.\n"
193194
]
194195
},
195196
{
@@ -293,7 +294,7 @@
293294
"name": "python",
294295
"nbconvert_exporter": "python",
295296
"pygments_lexer": "ipython3",
296-
"version": "3.11.6"
297+
"version": "3.12.4"
297298
}
298299
},
299300
"nbformat": 4,

docs/examples/tutorial_nemo_3D.ipynb

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -54,15 +54,16 @@
5454
"source": [
5555
"from datetime import timedelta\n",
5656
"from glob import glob\n",
57+
"import warnings\n",
5758
"\n",
5859
"import matplotlib.pyplot as plt\n",
5960
"import xarray as xr\n",
6061
"\n",
6162
"import parcels\n",
62-
"from parcels import logger\n",
63+
"from parcels import FileWarning\n",
6364
"\n",
6465
"# Add a filter for the xarray decoding warning\n",
65-
"logger.addFilter(parcels.XarrayDecodedFilter())\n",
66+
"warnings.simplefilter(\"ignore\", FileWarning)\n",
6667
"\n",
6768
"example_dataset_folder = parcels.download_example_dataset(\n",
6869
" \"NemoNorthSeaORCA025-N006_data\"\n",
@@ -234,7 +235,7 @@
234235
"name": "python",
235236
"nbconvert_exporter": "python",
236237
"pygments_lexer": "ipython3",
237-
"version": "3.12.4"
238+
"version": "3.12.5"
238239
}
239240
},
240241
"nbformat": 4,

docs/examples/tutorial_timestamps.ipynb

Lines changed: 50 additions & 45 deletions
Large diffs are not rendered by default.

docs/reference/misc.rst

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,12 @@ parcels.tools.loggers module
2929
:members:
3030
:undoc-members:
3131

32+
parcels.tools.warnings module
33+
-----------------------------
34+
.. automodule:: parcels.tools.warnings
35+
:members:
36+
:undoc-members:
37+
3238
parcels.tools.exampledata_utils module
3339
--------------------------------------
3440

parcels/compilation/codegenerator.py

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import collections
33
import math
44
import random
5+
import warnings
56
from abc import ABC
67
from copy import copy
78

@@ -10,8 +11,8 @@
1011
from parcels.field import Field, NestedField, VectorField
1112
from parcels.grid import Grid
1213
from parcels.particle import JITParticle
13-
from parcels.tools.loggers import logger
1414
from parcels.tools.statuscodes import StatusCode
15+
from parcels.tools.warnings import KernelWarning
1516

1617

1718
class IntrinsicNode(ast.AST):
@@ -178,9 +179,11 @@ def __init__(self, obj, attr):
178179

179180
class ParticleXiYiZiTiAttributeNode(IntrinsicNode):
180181
def __init__(self, obj, attr):
181-
logger.warning_once(
182+
warnings.warn(
182183
f"Be careful when sampling particle.{attr}, as this is updated in the kernel loop. "
183-
"Best to place the sampling statement before advection."
184+
"Best to place the sampling statement before advection.",
185+
KernelWarning,
186+
stacklevel=2,
184187
)
185188
self.obj = obj.ccode
186189
self.attr = attr
@@ -309,8 +312,10 @@ def visit_Subscript(self, node):
309312
def visit_AugAssign(self, node):
310313
node.target = self.visit(node.target)
311314
if isinstance(node.target, ParticleAttributeNode) and node.target.attr in ["lon", "lat", "depth", "time"]:
312-
logger.warning_once(
313-
"Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc."
315+
warnings.warn(
316+
"Don't change the location of a particle directly in a Kernel. Use particle_dlon, particle_dlat, etc.",
317+
KernelWarning,
318+
stacklevel=2,
314319
)
315320
node.op = self.visit(node.op)
316321
node.value = self.visit(node.value)
@@ -439,7 +444,11 @@ def generate(self, py_ast, funcvars: list[str]):
439444
for kvar in funcvars:
440445
if kvar in used_vars + ["particle_dlon", "particle_dlat", "particle_ddepth"]:
441446
if kvar not in ["particle", "fieldset", "time", "particle_dlon", "particle_dlat", "particle_ddepth"]:
442-
logger.warning(kvar + " declared in multiple Kernels")
447+
warnings.warn(
448+
kvar + " declared in multiple Kernels",
449+
KernelWarning,
450+
stacklevel=2,
451+
)
443452
funcvars_copy.remove(kvar)
444453
else:
445454
used_vars.append(kvar)

parcels/field.py

Lines changed: 39 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import collections
22
import datetime
33
import math
4+
import warnings
45
from ctypes import POINTER, Structure, c_float, c_int, pointer
56
from pathlib import Path
67
from typing import TYPE_CHECKING, Iterable, Type
@@ -18,14 +19,14 @@
1819
UnitConverter,
1920
unitconverters_map,
2021
)
21-
from parcels.tools.loggers import logger
2222
from parcels.tools.statuscodes import (
2323
AllParcelsErrorCodes,
2424
FieldOutOfBoundError,
2525
FieldOutOfBoundSurfaceError,
2626
FieldSamplingError,
2727
TimeExtrapolationError,
2828
)
29+
from parcels.tools.warnings import FieldSetWarning, _deprecated_param_netcdf_decodewarning
2930

3031
from .fieldfilebuffer import (
3132
DaskFileBuffer,
@@ -163,6 +164,10 @@ def __init__(
163164
to_write=False,
164165
**kwargs,
165166
):
167+
if kwargs.get("netcdf_decodewarning") is not None:
168+
_deprecated_param_netcdf_decodewarning()
169+
kwargs.pop("netcdf_decodewarning")
170+
166171
if not isinstance(name, tuple):
167172
self.name = name
168173
self.filebuffername = name
@@ -211,8 +216,10 @@ def __init__(
211216
GridType.RectilinearSGrid,
212217
GridType.CurvilinearSGrid,
213218
]:
214-
logger.warning_once( # type: ignore
215-
"General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal."
219+
warnings.warn(
220+
"General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal.",
221+
FieldSetWarning,
222+
stacklevel=2,
216223
)
217224

218225
self.fieldset: "FieldSet" | None = None
@@ -223,9 +230,10 @@ def __init__(
223230

224231
self.time_periodic = time_periodic
225232
if self.time_periodic is not False and self.allow_time_extrapolation:
226-
logger.warning_once( # type: ignore
227-
"allow_time_extrapolation and time_periodic cannot be used together.\n \
228-
allow_time_extrapolation is set to False"
233+
warnings.warn(
234+
"allow_time_extrapolation and time_periodic cannot be used together. allow_time_extrapolation is set to False",
235+
FieldSetWarning,
236+
stacklevel=2,
229237
)
230238
self.allow_time_extrapolation = False
231239
if self.time_periodic is True:
@@ -275,9 +283,8 @@ def __init__(
275283
self.dataFiles = np.append(self.dataFiles, self.dataFiles[0])
276284
self._field_fb_class = kwargs.pop("FieldFileBuffer", None)
277285
self.netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
278-
self.netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)
279-
self.loaded_time_indices: Iterable[int] = []
280-
self.creation_log: str = kwargs.pop("creation_log", "")
286+
self.loaded_time_indices: Iterable[int] = [] # type: ignore
287+
self.creation_log = kwargs.pop("creation_log", "")
281288
self.chunksize = kwargs.pop("chunksize", None)
282289
self.netcdf_chunkdims_name_map = kwargs.pop("chunkdims_name_map", None)
283290
self.grid.depth_field = kwargs.pop("depth_field", None)
@@ -315,8 +322,10 @@ def get_dim_filenames(cls, filenames, dim):
315322

316323
@staticmethod
317324
def collect_timeslices(
318-
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning=True
325+
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning=None
319326
):
327+
if netcdf_decodewarning is not None:
328+
_deprecated_param_netcdf_decodewarning()
320329
if timestamps is not None:
321330
dataFiles = []
322331
for findex in range(len(data_filenames)):
@@ -329,9 +338,7 @@ def collect_timeslices(
329338
timeslices = []
330339
dataFiles = []
331340
for fname in data_filenames:
332-
with _grid_fb_class(
333-
fname, dimensions, indices, netcdf_engine=netcdf_engine, netcdf_decodewarning=netcdf_decodewarning
334-
) as filebuffer:
341+
with _grid_fb_class(fname, dimensions, indices, netcdf_engine=netcdf_engine) as filebuffer:
335342
ftime = filebuffer.time
336343
timeslices.append(ftime)
337344
dataFiles.append([fname] * len(ftime))
@@ -408,7 +415,7 @@ def from_netcdf(
408415
chunksize :
409416
size of the chunks in dask loading
410417
netcdf_decodewarning : bool
411-
Whether to show a warning id there is a problem decoding the netcdf files.
418+
(DEPRECATED - v3.1.0) Whether to show a warning if there is a problem decoding the netcdf files.
412419
Default is True, but in some cases where these warnings are expected, it may be useful to silence them
413420
by setting netcdf_decodewarning=False.
414421
grid :
@@ -423,6 +430,10 @@ def from_netcdf(
423430
* `Timestamps <../examples/tutorial_timestamps.ipynb>`__
424431
425432
"""
433+
if kwargs.get("netcdf_decodewarning") is not None:
434+
_deprecated_param_netcdf_decodewarning()
435+
kwargs.pop("netcdf_decodewarning")
436+
426437
# Ensure the timestamps array is compatible with the user-provided datafiles.
427438
if timestamps is not None:
428439
if isinstance(filenames, list):
@@ -475,7 +486,6 @@ def from_netcdf(
475486
depth_filename = depth_filename[0]
476487

477488
netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
478-
netcdf_decodewarning = kwargs.pop("netcdf_decodewarning", True)
479489

480490
indices = {} if indices is None else indices.copy()
481491
for ind in indices:
@@ -498,9 +508,7 @@ def from_netcdf(
498508

499509
_grid_fb_class = NetcdfFileBuffer
500510

501-
with _grid_fb_class(
502-
lonlat_filename, dimensions, indices, netcdf_engine, netcdf_decodewarning=netcdf_decodewarning
503-
) as filebuffer:
511+
with _grid_fb_class(lonlat_filename, dimensions, indices, netcdf_engine) as filebuffer:
504512
lon, lat = filebuffer.lonlat
505513
indices = filebuffer.indices
506514
# Check if parcels_mesh has been explicitly set in file
@@ -514,7 +522,6 @@ def from_netcdf(
514522
indices,
515523
netcdf_engine,
516524
interp_method=interp_method,
517-
netcdf_decodewarning=netcdf_decodewarning,
518525
) as filebuffer:
519526
filebuffer.name = filebuffer.parse_name(variable[1])
520527
if dimensions["depth"] == "not_yet_set":
@@ -537,7 +544,7 @@ def from_netcdf(
537544
# Concatenate time variable to determine overall dimension
538545
# across multiple files
539546
time, time_origin, timeslices, dataFiles = cls.collect_timeslices(
540-
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning
547+
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine
541548
)
542549
grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh)
543550
grid.timeslices = timeslices
@@ -546,15 +553,17 @@ def from_netcdf(
546553
# ==== means: the field has a shared grid, but may have different data files, so we need to collect the
547554
# ==== correct file time series again.
548555
_, _, _, dataFiles = cls.collect_timeslices(
549-
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning
556+
timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine
550557
)
551558
kwargs["dataFiles"] = dataFiles
552559

553560
chunksize: bool | None = kwargs.get("chunksize", None)
554561
grid.chunksize = chunksize
555562

556563
if "time" in indices:
557-
logger.warning_once("time dimension in indices is not necessary anymore. It is then ignored.") # type: ignore
564+
warnings.warn(
565+
"time dimension in indices is not necessary anymore. It is then ignored.", FieldSetWarning, stacklevel=2
566+
)
558567

559568
if "full_load" in kwargs: # for backward compatibility with Parcels < v2.0.0
560569
deferred_load = not kwargs["full_load"]
@@ -587,7 +596,6 @@ def from_netcdf(
587596
interp_method=interp_method,
588597
data_full_zdim=data_full_zdim,
589598
chunksize=chunksize,
590-
netcdf_decodewarning=netcdf_decodewarning,
591599
) as filebuffer:
592600
# If Field.from_netcdf is called directly, it may not have a 'data' dimension
593601
# In that case, assume that 'name' is the data dimension
@@ -632,7 +640,6 @@ def from_netcdf(
632640
kwargs["indices"] = indices
633641
kwargs["time_periodic"] = time_periodic
634642
kwargs["netcdf_engine"] = netcdf_engine
635-
kwargs["netcdf_decodewarning"] = netcdf_decodewarning
636643

637644
return cls(
638645
variable,
@@ -820,16 +827,13 @@ def calc_cell_edge_sizes(self):
820827
self.grid.cell_edge_sizes["y"][y, x] = y_conv.to_source(dy, lon, lat, self.grid.depth[0])
821828
self.cell_edge_sizes = self.grid.cell_edge_sizes
822829
else:
823-
logger.error(
830+
raise ValueError(
824831
(
825-
"Field.cell_edge_sizes() not implemented for ",
826-
self.grid.gtype,
827-
"grids.",
828-
"You can provide Field.grid.cell_edge_sizes yourself",
829-
"by in e.g. NEMO using the e1u fields etc from the mesh_mask.nc file",
832+
f"Field.cell_edge_sizes() not implemented for {self.grid.gtype} grids. "
833+
"You can provide Field.grid.cell_edge_sizes yourself by in, e.g., "
834+
"NEMO using the e1u fields etc from the mesh_mask.nc file."
830835
)
831836
)
832-
exit(-1)
833837

834838
def cell_areas(self):
835839
"""Method to calculate cell sizes based on cell_edge_sizes.
@@ -1347,8 +1351,10 @@ def time_index(self, time):
13471351

13481352
def _check_velocitysampling(self):
13491353
if self.name in ["U", "V", "W"]:
1350-
logger.warning_once(
1351-
"Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully"
1354+
warnings.warn(
1355+
"Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully",
1356+
RuntimeWarning,
1357+
stacklevel=2,
13521358
)
13531359

13541360
def __getitem__(self, key):
@@ -1653,7 +1659,6 @@ def computeTimeChunk(self, data, tindex):
16531659
cast_data_dtype=self.cast_data_dtype,
16541660
rechunk_callback_fields=rechunk_callback_fields,
16551661
chunkdims_name_map=self.netcdf_chunkdims_name_map,
1656-
netcdf_decodewarning=self.netcdf_decodewarning,
16571662
)
16581663
filebuffer.__enter__()
16591664
time_data = filebuffer.time

0 commit comments

Comments (0)