@@ -49,6 +49,7 @@ class Config:
4949 _alpha_expo :float = DefaultProperties .init_learning_rate_expo # Initial learning rate exponent (base 10)
5050 _lr_decay :float = DefaultProperties .learning_rate_decay # Learning rate decay parameter.
5151 _batch_expo :int = DefaultProperties .batch_size_exponent # Mini-batch size exponent (base 2).
52+ _n_epochs :int = DefaultProperties .N_epochs # Maximum number of training epochs.
5253 _hidden_layer_architecture :list [int ] = DefaultProperties .hidden_layer_architecture # Hidden layer perceptron count.
5354 _activation_function :str = DefaultProperties .activation_function # Hidden layer activation function name.
5455
@@ -105,7 +106,7 @@ def GetOutputDir(self):
105106 def SetConcatenationFileHeader (self , header :str = DefaultProperties .output_file_header ):
106107 """Define the file name header of the processed fluid manifold data.
107108
108- :param header: manifold data file header, defaults to DefaultProperties.output_file_header
109+ :param header: manifold data file header, defaults to "fluid_data"
109110 :type header: str, optional
110111 """
111112
@@ -142,7 +143,7 @@ def GetConfigName(self):
142143 return self ._config_name
143144
144145 def SetControllingVariables (self , names_cv :list [str ]):
145- """Define the set of controlling variables used for defining the manifold .
146+ """Define the set of controlling variable names used as inputs for the networks of the data-driven fluid model .
146147
147148 :param names_cv: list with controlling variable names.
148149 :type names_cv: list[str]
@@ -155,7 +156,7 @@ def SetControllingVariables(self, names_cv:list[str]):
155156 return
156157
157158 def GetControllingVariables (self ):
158- """Get the controlling variables used for defining the manifold .
159+ """Retrieve the set of controlling variable names used as inputs for the networks of the data-driven fluid model .
159160
160161 :return: list of controlling variable names.
161162 :rtype: list[str]
@@ -166,7 +167,7 @@ def GetControllingVariables(self):
166167 def SetTrainFraction (self , input :float = DefaultProperties .train_fraction ):
167168 """Define the fraction of fluid data used for MLP training.
168169
169- :param input: fluid data train fraction, defaults to DefaultProperties.train_fraction
170+ :param input: fluid data train fraction, defaults to 0.8
170171 :type input: float, optional
171172 :raises Exception: if provided value lies outside 0-1.
172173 """
@@ -180,7 +181,7 @@ def SetTrainFraction(self, input:float=DefaultProperties.train_fraction):
180181 def SetTestFraction (self , input :float = DefaultProperties .test_fraction ):
181182 """Define the fraction of fluid data used for MLP prediction accuracy evaluation.
182183
183- :param input: fluid data test set fraction, defaults to DefaultProperties.test_fraction
184+ :param input: fluid data test set fraction, defaults to 0.1
184185 :type input: float, optional
185186 :raises Exception: if provided value lies outside 0-1.
186187 """
@@ -220,7 +221,7 @@ def GetAlphaExpo(self):
220221 def SetAlphaExpo (self , alpha_expo_in :float = DefaultProperties .init_learning_rate_expo ):
221222 """Define the initial learning rate exponent (base 10).
222223
223- :param alpha_expo_in: log10 of initial learning rate, defaults to DefaultProperties.init_learning_rate_expo
224+ :param alpha_expo_in: log10 of initial learning rate, defaults to -1.8269
224225 :type alpha_expo_in: float, optional
225226 :raises Exception: if provided value is positive.
226227 """
@@ -251,8 +252,29 @@ def SetLRDecay(self, lr_decay_in:float=DefaultProperties.learning_rate_decay):
251252 self ._lr_decay = lr_decay_in
252253 return
253254
def SetNEpochs(self, n_epochs_in:int=DefaultProperties.N_epochs):
    """Specify the maximum number of epochs for training of the networks.

    :param n_epochs_in: maximum number of epochs, defaults to 1000
    :type n_epochs_in: int, optional
    :raises Exception: if the number is lower than 1.
    """
    # Guard permits exactly 1; the message now agrees with the check
    # (it previously claimed the value had to be "higher than 1").
    if n_epochs_in < 1:
        raise Exception("Number of epochs should be at least 1")

    self._n_epochs = int(n_epochs_in)
    return
267+
def GetNEpochs(self):
    """Retrieve the maximum number of epochs the networks are trained for.

    :return: maximum number of training epochs.
    :rtype: int
    """
    n_epochs_max = self._n_epochs
    return n_epochs_max
275+
254276 def SetBatchExpo (self , batch_expo_in :int = DefaultProperties .batch_size_exponent ):
255- """Set the mini-batch size exponent for MLP training.
277+ """Set the mini-batch size exponent (base 2) for MLP training.
256278
257279 :param batch_expo_in: Mini-batch size exponent (base 2) used for MLP training, defaults to DefaultProperties.batch_size_exponent
258280 :type batch_expo_in: int, optional
@@ -272,13 +294,27 @@ def GetBatchExpo(self):
272294 """
273295 return self ._batch_expo
274296
297+ def __HiddenLayerChecks (self ,hidden_layer_architecture :list [int ]):
298+ if not hidden_layer_architecture :
299+ raise Exception ("At least one hidden layer should be specified." )
300+ for h in hidden_layer_architecture :
301+ if h < 1 :
302+ raise Exception ("Number of nodes in the hidden layers should be positive." )
303+ if type (h ) is not int :
304+ raise Exception ("Nodes in the hidden layers should be integers." )
305+ return
306+
275307 def SetHiddenLayerArchitecture (self , hidden_layer_architecture :list [int ]= DefaultProperties .hidden_layer_architecture ):
276308 """
277309 Define the hidden layer architecture of the multi-layer perceptron used for the MLP-based manifold.
278310
279- :param hidden_layer_architecture: listed neuron count per hidden layer, defaults to DefaultProperties.hidden_layer_architecture
311+ :param hidden_layer_architecture: listed neuron count per hidden layer, defaults to [20,20,20]
280312 :type hidden_layer_architecture: list[int], optional
313+ :raises Exception: if an empty list is provided or if input contains non-integer data or the number of nodes is less than 1.
281314 """
315+
316+ self .__HiddenLayerChecks (hidden_layer_architecture )
317+
282318 self ._hidden_layer_architecture = []
283319 for n in hidden_layer_architecture :
284320 self ._hidden_layer_architecture .append (n )
@@ -292,33 +328,57 @@ def GetHiddenLayerArchitecture(self):
292328 """
293329 return self ._hidden_layer_architecture
294330
331+ def __WeightsCheck (self , weights :list [np .ndarray [float ]]):
332+ if not weights :
333+ raise Exception ("Weights list should contain at least one array." )
334+ for i in range (len (weights )- 1 ):
335+ w_i = weights [i ]
336+ w_ip = weights [i + 1 ]
337+ if np .shape (w_i )[1 ] != np .shape (w_ip )[0 ]:
338+ raise Exception ("Weight arrays are improperly formatted. Check rows and columns." )
339+ return
340+
341+
def SetWeights(self, weights:list[np.ndarray[float]]):
    """Store the weight values of the neural network.

    :param weights: weight arrays for the network hidden layers.
    :type weights: list[np.ndarray[float]]
    :raises Exception: if an empty list is provided or the weights arrays are improperly formatted.
    """
    self.__WeightsCheck(weights)

    # Keep a shallow copy so later mutation of the caller's list does not
    # alter the stored configuration.
    self._MLP_weights = list(weights)
    return
306355
356+ def __BiasesCheck (self , biases :list [np .ndarray [float ]]):
357+ if not biases :
358+ raise Exception ("Biases list should contain at least one entry." )
359+ for b in biases :
360+ if b .size == 0 :
361+ raise Exception ("Biases for hidden layers should contain at least one value." )
362+ return
363+
def SetBiases(self, biases:list[np.ndarray[float]]):
    """Store the bias values of the neural network.

    :param biases: bias arrays for the network hidden layers.
    :type biases: list[np.ndarray[float]]
    :raises Exception: if an empty list is provided or contains empty arrays.
    """
    self.__BiasesCheck(biases)

    # Keep a shallow copy so later mutation of the caller's list does not
    # alter the stored configuration.
    self._MLP_biases = list(biases)
    return
317377
318378 def SetActivationFunction (self , activation_function_in :str = DefaultProperties .activation_function ):
319- """Define the hidden layer activation function for the MLP-based manifold.
379+ """Define the hidden layer activation function for the MLP-based manifold. See Common.Properties.ActivationFunctionOptions for the supported options.
320380
321- :param activation_function_in: hidden layer activation function name, defaults to DefaultProperties.activation_function
381+ :param activation_function_in: hidden layer activation function name, defaults to "gelu"
322382 :type activation_function_in: str, optional
323383 :raises Exception: if the provided name does not appear in the list of available activation function options.
324384 """
@@ -346,6 +406,7 @@ def UpdateMLPHyperParams(self, trainer):
346406 self ._alpha_expo = trainer .alpha_expo
347407 self ._lr_decay = trainer .lr_decay
348408 self ._batch_expo = trainer .batch_expo
409+ self ._n_epochs = trainer .n_epochs
349410 self ._hidden_layer_architecture = trainer .architecture .copy ()
350411 self ._activation_function = trainer .activation_function
351412
@@ -361,7 +422,7 @@ def GetWeightsBiases(self):
361422 """Return values for weights and biases for the hidden layers in the MLP.
362423
363424 :return: weight arrays, biases arrays
364- :rtype: list[ np.ndarray[float]]
425+ :rtype: tuple[list[np.ndarray[float]], list[np.ndarray[float]]]
365426 """
366427 return self ._MLP_weights , self ._MLP_biases
367428
0 commit comments