11import collections
22import datetime
33import math
4+ import warnings
45from ctypes import POINTER , Structure , c_float , c_int , pointer
56from pathlib import Path
67from typing import TYPE_CHECKING , Iterable , Type
1819 UnitConverter ,
1920 unitconverters_map ,
2021)
21- from parcels .tools .loggers import logger
2222from parcels .tools .statuscodes import (
2323 AllParcelsErrorCodes ,
2424 FieldOutOfBoundError ,
2525 FieldOutOfBoundSurfaceError ,
2626 FieldSamplingError ,
2727 TimeExtrapolationError ,
2828)
29+ from parcels .tools .warnings import FieldSetWarning , _deprecated_param_netcdf_decodewarning
2930
3031from .fieldfilebuffer import (
3132 DaskFileBuffer ,
@@ -163,6 +164,10 @@ def __init__(
163164 to_write = False ,
164165 ** kwargs ,
165166 ):
167+ if kwargs .get ("netcdf_decodewarning" ) is not None :
168+ _deprecated_param_netcdf_decodewarning ()
169+ kwargs .pop ("netcdf_decodewarning" )
170+
166171 if not isinstance (name , tuple ):
167172 self .name = name
168173 self .filebuffername = name
@@ -211,8 +216,10 @@ def __init__(
211216 GridType .RectilinearSGrid ,
212217 GridType .CurvilinearSGrid ,
213218 ]:
214- logger .warning_once ( # type: ignore
215- "General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal."
219+ warnings .warn (
220+ "General s-levels are not supported in B-grid. RectilinearSGrid and CurvilinearSGrid can still be used to deal with shaved cells, but the levels must be horizontal." ,
221+ FieldSetWarning ,
222+ stacklevel = 2 ,
216223 )
217224
218225 self .fieldset : "FieldSet" | None = None
@@ -223,9 +230,10 @@ def __init__(
223230
224231 self .time_periodic = time_periodic
225232 if self .time_periodic is not False and self .allow_time_extrapolation :
226- logger .warning_once ( # type: ignore
227- "allow_time_extrapolation and time_periodic cannot be used together.\n \
228- allow_time_extrapolation is set to False"
233+ warnings .warn (
234+ "allow_time_extrapolation and time_periodic cannot be used together. allow_time_extrapolation is set to False" ,
235+ FieldSetWarning ,
236+ stacklevel = 2 ,
229237 )
230238 self .allow_time_extrapolation = False
231239 if self .time_periodic is True :
@@ -275,9 +283,8 @@ def __init__(
275283 self .dataFiles = np .append (self .dataFiles , self .dataFiles [0 ])
276284 self ._field_fb_class = kwargs .pop ("FieldFileBuffer" , None )
277285 self .netcdf_engine = kwargs .pop ("netcdf_engine" , "netcdf4" )
278- self .netcdf_decodewarning = kwargs .pop ("netcdf_decodewarning" , True )
279- self .loaded_time_indices : Iterable [int ] = []
280- self .creation_log : str = kwargs .pop ("creation_log" , "" )
286+ self .loaded_time_indices : Iterable [int ] = [] # type: ignore
287+ self .creation_log = kwargs .pop ("creation_log" , "" )
281288 self .chunksize = kwargs .pop ("chunksize" , None )
282289 self .netcdf_chunkdims_name_map = kwargs .pop ("chunkdims_name_map" , None )
283290 self .grid .depth_field = kwargs .pop ("depth_field" , None )
@@ -315,8 +322,10 @@ def get_dim_filenames(cls, filenames, dim):
315322
316323 @staticmethod
317324 def collect_timeslices (
318- timestamps , data_filenames , _grid_fb_class , dimensions , indices , netcdf_engine , netcdf_decodewarning = True
325+ timestamps , data_filenames , _grid_fb_class , dimensions , indices , netcdf_engine , netcdf_decodewarning = None
319326 ):
327+ if netcdf_decodewarning is not None :
328+ _deprecated_param_netcdf_decodewarning ()
320329 if timestamps is not None :
321330 dataFiles = []
322331 for findex in range (len (data_filenames )):
@@ -329,9 +338,7 @@ def collect_timeslices(
329338 timeslices = []
330339 dataFiles = []
331340 for fname in data_filenames :
332- with _grid_fb_class (
333- fname , dimensions , indices , netcdf_engine = netcdf_engine , netcdf_decodewarning = netcdf_decodewarning
334- ) as filebuffer :
341+ with _grid_fb_class (fname , dimensions , indices , netcdf_engine = netcdf_engine ) as filebuffer :
335342 ftime = filebuffer .time
336343 timeslices .append (ftime )
337344 dataFiles .append ([fname ] * len (ftime ))
@@ -408,7 +415,7 @@ def from_netcdf(
408415 chunksize :
409416 size of the chunks in dask loading
410417 netcdf_decodewarning : bool
411- Whether to show a warning id there is a problem decoding the netcdf files.
418+ (DEPRECATED - v3.1.0) Whether to show a warning if there is a problem decoding the netcdf files.
412419 Default is True, but in some cases where these warnings are expected, it may be useful to silence them
413420 by setting netcdf_decodewarning=False.
414421 grid :
@@ -423,6 +430,10 @@ def from_netcdf(
423430 * `Timestamps <../examples/tutorial_timestamps.ipynb>`__
424431
425432 """
433+ if kwargs .get ("netcdf_decodewarning" ) is not None :
434+ _deprecated_param_netcdf_decodewarning ()
435+ kwargs .pop ("netcdf_decodewarning" )
436+
426437 # Ensure the timestamps array is compatible with the user-provided datafiles.
427438 if timestamps is not None :
428439 if isinstance (filenames , list ):
@@ -475,7 +486,6 @@ def from_netcdf(
475486 depth_filename = depth_filename [0 ]
476487
477488 netcdf_engine = kwargs .pop ("netcdf_engine" , "netcdf4" )
478- netcdf_decodewarning = kwargs .pop ("netcdf_decodewarning" , True )
479489
480490 indices = {} if indices is None else indices .copy ()
481491 for ind in indices :
@@ -498,9 +508,7 @@ def from_netcdf(
498508
499509 _grid_fb_class = NetcdfFileBuffer
500510
501- with _grid_fb_class (
502- lonlat_filename , dimensions , indices , netcdf_engine , netcdf_decodewarning = netcdf_decodewarning
503- ) as filebuffer :
511+ with _grid_fb_class (lonlat_filename , dimensions , indices , netcdf_engine ) as filebuffer :
504512 lon , lat = filebuffer .lonlat
505513 indices = filebuffer .indices
506514 # Check if parcels_mesh has been explicitly set in file
@@ -514,7 +522,6 @@ def from_netcdf(
514522 indices ,
515523 netcdf_engine ,
516524 interp_method = interp_method ,
517- netcdf_decodewarning = netcdf_decodewarning ,
518525 ) as filebuffer :
519526 filebuffer .name = filebuffer .parse_name (variable [1 ])
520527 if dimensions ["depth" ] == "not_yet_set" :
@@ -537,7 +544,7 @@ def from_netcdf(
537544 # Concatenate time variable to determine overall dimension
538545 # across multiple files
539546 time , time_origin , timeslices , dataFiles = cls .collect_timeslices (
540- timestamps , data_filenames , _grid_fb_class , dimensions , indices , netcdf_engine , netcdf_decodewarning
547+ timestamps , data_filenames , _grid_fb_class , dimensions , indices , netcdf_engine
541548 )
542549 grid = Grid .create_grid (lon , lat , depth , time , time_origin = time_origin , mesh = mesh )
543550 grid .timeslices = timeslices
@@ -546,15 +553,17 @@ def from_netcdf(
546553 # ==== means: the field has a shared grid, but may have different data files, so we need to collect the
547554 # ==== correct file time series again.
548555 _ , _ , _ , dataFiles = cls .collect_timeslices (
549- timestamps , data_filenames , _grid_fb_class , dimensions , indices , netcdf_engine , netcdf_decodewarning
556+ timestamps , data_filenames , _grid_fb_class , dimensions , indices , netcdf_engine
550557 )
551558 kwargs ["dataFiles" ] = dataFiles
552559
553560 chunksize : bool | None = kwargs .get ("chunksize" , None )
554561 grid .chunksize = chunksize
555562
556563 if "time" in indices :
557- logger .warning_once ("time dimension in indices is not necessary anymore. It is then ignored." ) # type: ignore
564+ warnings .warn (
565+ "time dimension in indices is not necessary anymore. It is then ignored." , FieldSetWarning , stacklevel = 2
566+ )
558567
559568 if "full_load" in kwargs : # for backward compatibility with Parcels < v2.0.0
560569 deferred_load = not kwargs ["full_load" ]
@@ -587,7 +596,6 @@ def from_netcdf(
587596 interp_method = interp_method ,
588597 data_full_zdim = data_full_zdim ,
589598 chunksize = chunksize ,
590- netcdf_decodewarning = netcdf_decodewarning ,
591599 ) as filebuffer :
592600 # If Field.from_netcdf is called directly, it may not have a 'data' dimension
593601 # In that case, assume that 'name' is the data dimension
@@ -632,7 +640,6 @@ def from_netcdf(
632640 kwargs ["indices" ] = indices
633641 kwargs ["time_periodic" ] = time_periodic
634642 kwargs ["netcdf_engine" ] = netcdf_engine
635- kwargs ["netcdf_decodewarning" ] = netcdf_decodewarning
636643
637644 return cls (
638645 variable ,
@@ -820,16 +827,13 @@ def calc_cell_edge_sizes(self):
820827 self .grid .cell_edge_sizes ["y" ][y , x ] = y_conv .to_source (dy , lon , lat , self .grid .depth [0 ])
821828 self .cell_edge_sizes = self .grid .cell_edge_sizes
822829 else :
823- logger . error (
830+ raise ValueError (
824831 (
825- "Field.cell_edge_sizes() not implemented for " ,
826- self .grid .gtype ,
827- "grids." ,
828- "You can provide Field.grid.cell_edge_sizes yourself" ,
829- "by in e.g. NEMO using the e1u fields etc from the mesh_mask.nc file" ,
832+ f"Field.cell_edge_sizes() not implemented for { self .grid .gtype } grids. "
833+ "You can provide Field.grid.cell_edge_sizes yourself by in, e.g., "
834+ "NEMO using the e1u fields etc from the mesh_mask.nc file."
830835 )
831836 )
832- exit (- 1 )
833837
834838 def cell_areas (self ):
835839 """Method to calculate cell sizes based on cell_edge_sizes.
@@ -1347,8 +1351,10 @@ def time_index(self, time):
13471351
13481352 def _check_velocitysampling (self ):
13491353 if self .name in ["U" , "V" , "W" ]:
1350- logger .warning_once (
1351- "Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully"
1354+ warnings .warn (
1355+ "Sampling of velocities should normally be done using fieldset.UV or fieldset.UVW object; tread carefully" ,
1356+ RuntimeWarning ,
1357+ stacklevel = 2 ,
13521358 )
13531359
13541360 def __getitem__ (self , key ):
@@ -1653,7 +1659,6 @@ def computeTimeChunk(self, data, tindex):
16531659 cast_data_dtype = self .cast_data_dtype ,
16541660 rechunk_callback_fields = rechunk_callback_fields ,
16551661 chunkdims_name_map = self .netcdf_chunkdims_name_map ,
1656- netcdf_decodewarning = self .netcdf_decodewarning ,
16571662 )
16581663 filebuffer .__enter__ ()
16591664 time_data = filebuffer .time