Skip to content

Commit 32d92f5

Browse files
2 parents 1c8a26b + 74a80a6 commit 32d92f5

57 files changed

Lines changed: 10404 additions & 60 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
Documentation/html/*
2-
_site/*
2+
_site/*

Common/.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__pycache__/*
1+
__pycache__/*

Common/Config_base.py

Lines changed: 72 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ class Config:
4949
_alpha_expo:float = DefaultProperties.init_learning_rate_expo # Initial learning rate exponent (base 10)
5050
_lr_decay:float = DefaultProperties.learning_rate_decay # Learning rate decay parameter.
5151
_batch_expo:int = DefaultProperties.batch_size_exponent # Mini-batch size exponent (base 2).
52+
_n_epochs:int = DefaultProperties.N_epochs # Maximum number of training epochs.
5253
_hidden_layer_architecture:list[int] = DefaultProperties.hidden_layer_architecture # Hidden layer perceptron count.
5354
_activation_function:str = DefaultProperties.activation_function # Hidden layer activation function name.
5455

@@ -105,7 +106,7 @@ def GetOutputDir(self):
105106
def SetConcatenationFileHeader(self, header:str=DefaultProperties.output_file_header):
106107
"""Define the file name header of the processed fluid manifold data.
107108
108-
:param header: manifold data file header, defaults to DefaultProperties.output_file_header
109+
:param header: manifold data file header, defaults to "fluid_data"
109110
:type header: str, optional
110111
"""
111112

@@ -142,7 +143,7 @@ def GetConfigName(self):
142143
return self._config_name
143144

144145
def SetControllingVariables(self, names_cv:list[str]):
145-
"""Define the set of controlling variables used for defining the manifold.
146+
"""Define the set of controlling variable names used as inputs for the networks of the data-driven fluid model.
146147
147148
:param names_cv: list with controlling variable names.
148149
:type names_cv: list[str]
@@ -155,7 +156,7 @@ def SetControllingVariables(self, names_cv:list[str]):
155156
return
156157

157158
def GetControllingVariables(self):
158-
"""Get the controlling variables used for defining the manifold.
159+
"""Retrieve the set of controlling variable names used as inputs for the networks of the data-driven fluid model.
159160
160161
:return: list of controlling variable names.
161162
:rtype: list[str]
@@ -166,7 +167,7 @@ def GetControllingVariables(self):
166167
def SetTrainFraction(self, input:float=DefaultProperties.train_fraction):
167168
"""Define the fraction of fluid data used for MLP training.
168169
169-
:param input: fluid data train fraction, defaults to DefaultProperties.train_fraction
170+
:param input: fluid data train fraction, defaults to 0.8
170171
:type input: float, optional
171172
:raises Exception: if provided value lies outside 0-1.
172173
"""
@@ -180,7 +181,7 @@ def SetTrainFraction(self, input:float=DefaultProperties.train_fraction):
180181
def SetTestFraction(self, input:float=DefaultProperties.test_fraction):
181182
"""Define the fraction of fluid data used for MLP prediction accuracy evaluation.
182183
183-
:param input: fluid data test set fraction, defaults to DefaultProperties.test_fraction
184+
:param input: fluid data test set fraction, defaults to 0.1
184185
:type input: float, optional
185186
:raises Exception: if provided value lies outside 0-1.
186187
"""
@@ -220,7 +221,7 @@ def GetAlphaExpo(self):
220221
def SetAlphaExpo(self, alpha_expo_in:float=DefaultProperties.init_learning_rate_expo):
221222
"""Define the initial learning rate exponent (base 10).
222223
223-
:param alpha_expo_in: log10 of initial learning rate, defaults to DefaultProperties.init_learning_rate_expo
224+
:param alpha_expo_in: log10 of initial learning rate, defaults to -1.8269
224225
:type alpha_expo_in: float, optional
225226
:raises Exception: if provided value is positive.
226227
"""
@@ -251,8 +252,29 @@ def SetLRDecay(self, lr_decay_in:float=DefaultProperties.learning_rate_decay):
251252
self._lr_decay = lr_decay_in
252253
return
253254

255+
def SetNEpochs(self, n_epochs_in:int=DefaultProperties.N_epochs):
256+
"""Specify the maximum number of epochs for training of the networks.
257+
258+
:param n_epochs_in: maximum number of epochs, defaults to 1000
259+
:type n_epochs_in: int, optional
260+
:raises Exception: if the number is lower than 1.
261+
"""
262+
if n_epochs_in < 1:
263+
raise Exception("Number of epochs should be higher than 1")
264+
265+
self._n_epochs = int(n_epochs_in)
266+
return
267+
268+
def GetNEpochs(self):
269+
"""Retrieve the maximum number of epochs the networks are trained for.
270+
271+
:return: maximum number of training epochs.
272+
:rtype: int
273+
"""
274+
return self._n_epochs
275+
254276
def SetBatchExpo(self, batch_expo_in:int=DefaultProperties.batch_size_exponent):
255-
"""Set the mini-batch size exponent for MLP training.
277+
"""Set the mini-batch size exponent (base 2) for MLP training.
256278
257279
:param batch_expo_in: Mini-batch size exponent (base 2) used for MLP training, defaults to DefaultProperties.batch_size_exponent
258280
:type batch_expo_in: int, optional
@@ -272,13 +294,27 @@ def GetBatchExpo(self):
272294
"""
273295
return self._batch_expo
274296

297+
def __HiddenLayerChecks(self,hidden_layer_architecture:list[int]):
298+
if not hidden_layer_architecture:
299+
raise Exception("At least one hidden layer should be specified.")
300+
for h in hidden_layer_architecture:
301+
if h < 1:
302+
raise Exception("Number of nodes in the hidden layers should be positive.")
303+
if type(h) is not int:
304+
raise Exception("Nodes in the hidden layers should be integers.")
305+
return
306+
275307
def SetHiddenLayerArchitecture(self, hidden_layer_architecture:list[int]=DefaultProperties.hidden_layer_architecture):
276308
"""
277309
Define the hidden layer architecture of the multi-layer perceptron used for the MLP-based manifold.
278310
279-
:param hidden_layer_architecture: listed neuron count per hidden layer, defaults to DefaultProperties.hidden_layer_architecture
311+
:param hidden_layer_architecture: listed neuron count per hidden layer, defaults to [20,20,20]
280312
:type hidden_layer_architecture: list[int], optional
313+
:raises Exception: if an empty list is provided or if input contains non-integer data or the number of nodes is less than 1.
281314
"""
315+
316+
self.__HiddenLayerChecks(hidden_layer_architecture)
317+
282318
self._hidden_layer_architecture = []
283319
for n in hidden_layer_architecture:
284320
self._hidden_layer_architecture.append(n)
@@ -292,33 +328,57 @@ def GetHiddenLayerArchitecture(self):
292328
"""
293329
return self._hidden_layer_architecture
294330

331+
def __WeightsCheck(self, weights:list[np.ndarray[float]]):
332+
if not weights:
333+
raise Exception("Weights list should contain at least one array.")
334+
for i in range(len(weights)-1):
335+
w_i = weights[i]
336+
w_ip = weights[i+1]
337+
if np.shape(w_i)[1] != np.shape(w_ip)[0]:
338+
raise Exception("Weight arrays are improperly formatted. Check rows and columns.")
339+
return
340+
341+
295342
def SetWeights(self, weights:list[np.ndarray[float]]):
296343
"""Store the weight values of the neural network.
297344
298345
:param weights: weight arrays for the network hidden layers.
299346
:type weights: list[np.ndarray[float]]
347+
:raises: Exception: if an empty list is provided or the weights arrays are improperly formatted.
300348
"""
349+
self.__WeightsCheck(weights)
301350

302351
self._MLP_weights = []
303352
for w in weights:
304353
self._MLP_weights.append(w)
305354
return
306355

356+
def __BiasesCheck(self, biases:list[np.ndarray[float]]):
357+
if not biases:
358+
raise Exception("Biases list should contain at least one entry.")
359+
for b in biases:
360+
if b.size == 0:
361+
raise Exception("Biases for hidden layers should contain at least one value.")
362+
return
363+
307364
def SetBiases(self, biases:list[np.ndarray[float]]):
308365
"""Store the bias values of the neural network.
309366
310367
:param weights: bias arrays for the network hidden layers.
311368
:type weights: list[np.ndarray[float]]
369+
:raises: Exception: if an empty list is provided or contains empty arrays.
312370
"""
371+
self.__BiasesCheck(biases)
372+
313373
self._MLP_biases = []
314374
for w in biases:
315375
self._MLP_biases.append(w)
316376
return
317377

318378
def SetActivationFunction(self, activation_function_in:str=DefaultProperties.activation_function):
319-
"""Define the hidden layer activation function for the MLP-based manifold.
379+
"""Define the hidden layer activation function for the MLP-based manifold. See Common.Properties.ActivationFunctionOptions for the supported options.
320380
321-
:param activation_function_in: hidden layer activation function name, defaults to DefaultProperties.activation_function
381+
:param activation_function_in: hidden layer activation function name, defaults to "gelu"
322382
:type activation_function_in: str, optional
323383
:raises Exception: if the provided name does not appear in the list of available activation function options.
324384
"""
@@ -346,6 +406,7 @@ def UpdateMLPHyperParams(self, trainer):
346406
self._alpha_expo = trainer.alpha_expo
347407
self._lr_decay = trainer.lr_decay
348408
self._batch_expo = trainer.batch_expo
409+
self._n_epochs = trainer.n_epochs
349410
self._hidden_layer_architecture = trainer.architecture.copy()
350411
self._activation_function = trainer.activation_function
351412

@@ -361,7 +422,7 @@ def GetWeightsBiases(self):
361422
"""Return values for weights and biases for the hidden layers in the MLP.
362423
363424
:return: weight arrays, biases arrays
364-
:rtype: list[np.ndarray[float]]
425+
:rtype: tuple(np.ndarray[float])
365426
"""
366427
return self._MLP_weights, self._MLP_biases
367428

Common/DataDrivenConfig.py

Lines changed: 46 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,8 @@
3232
import pickle
3333
import CoolProp
3434
import cantera as ct
35+
from CoolProp.CoolProp import get_global_param_string
36+
supported_fluid_names = get_global_param_string("FluidsList").split(',')
3537

3638
#---------------------------------------------------------------------------------------------#
3739
# Importing DataMiner classes and functions
@@ -85,7 +87,7 @@ class Config_NICFD(Config):
8587
_config_type = DefaultSettings_NICFD.config_type
8688

8789
def __init__(self, load_file:str=None):
88-
"""EntropicAI SU2 DataMiner configuration class.
90+
"""SU2 DataMiner configuration class for real-gas applications.
8991
9092
:param load_file: configuration file name to load, defaults to None
9193
:type load_file: str, optional
@@ -152,9 +154,9 @@ def PrintBanner(self):
152154
return
153155

154156

155-
def SetFluid(self, fluid_name):
157+
def SetFluid(self, fluid_name=DefaultSettings_NICFD.fluid_name):
156158
"""
157-
Define the fluid name used for entropic data generation. By default, \"MM\" is used.
159+
Define the fluid name used for entropic data generation. By default, "Air" is used. Specify a list of fluids for mixtures.
158160
159161
:param fluid_name: CoolProp fluid name or list of names.
160162
:type fluid_name: str or list[str]
@@ -170,11 +172,17 @@ def SetFluid(self, fluid_name):
170172
self.__fluid_names = []
171173
fluid_mixing = []
172174
for f in fluid_name:
175+
if type(f) is not str:
176+
raise Exception("Fluid name should be provided in string format.")
177+
if f not in supported_fluid_names:
178+
raise Exception("Fluid name should be one of the following: %s" % (", ".join(q for q in supported_fluid_names)))
173179
self.__fluid_names.append(f)
174180
if len(self.__fluid_mole_fractions) == 0:
175181
self.__fluid_mole_fractions = np.ones(len(self.__fluid_names))/len(self.__fluid_names)
176182

177183
elif type(fluid_name) == str:
184+
if fluid_name not in supported_fluid_names:
185+
raise Exception("Fluid name should be one of the following: %s" % (", ".join(q for q in supported_fluid_names)))
178186
self.__fluid_names = [fluid_name]
179187

180188
fluid_string = "&".join(f for f in self.__fluid_names)
@@ -186,10 +194,26 @@ def SetFluid(self, fluid_name):
186194
return
187195

188196
def SetEquationOfState(self, EOS_type_in:str=DefaultSettings_NICFD.EOS_type):
189-
self.__EOS_type=EOS_type_in
197+
"""Define the equation of state backend used by CoolProp to generate fluid data.
198+
199+
:param EOS_type_in: backend used by CoolProp, defaults to "HEOS"
200+
:type EOS_type_in: str, optional
201+
:raises Exception: if the specified backend is not supported.
202+
"""
203+
if type(EOS_type_in) is not str:
204+
raise Exception("Equation of state should be provided in string format.")
205+
if EOS_type_in.upper() not in DefaultSettings_NICFD.supported_backends:
206+
raise Exception("Equation of state not supported, should be one of the following : %s" % ", ".join(e for e in DefaultSettings_NICFD.supported_backends))
207+
208+
self.__EOS_type=EOS_type_in.upper()
190209
return
191210

192211
def GetEquationOfState(self):
212+
"""Retrieve the equation of state backend used by CoolProp for fluid data calculations.
213+
214+
:return: name of the equation of state model.
215+
:rtype: str
216+
"""
193217
return self.__EOS_type
194218

195219
def SetFluidMoleFractions(self, mole_fractions:list[float]):
@@ -215,8 +239,7 @@ def SetFluidMoleFractions(self, mole_fractions:list[float]):
215239
return
216240

217241
def GetFluid(self):
218-
"""
219-
Get the fluid used for entropic data generation.
242+
"""Get the name of the fluid for which thermodynamic data is generated.
220243
:return: fluid name
221244
:rtype: str
222245
@@ -230,12 +253,12 @@ def GetMoleFractions(self):
230253
return self.__fluid_mole_fractions.copy()
231254

232255
def UseAutoRange(self, use_auto_range:bool=True):
233-
"""Automatically determine fluid data range based on available fluid properties.
256+
"""Automatically determine the span of the thermodynamic state space for which fluid data are generated.
234257
235258
:param use_auto_range: automatically set fluid data range, defaults to True
236259
:type use_auto_range: bool, optional
237260
"""
238-
self.__use_auto_range = use_auto_range
261+
self.__use_auto_range = bool(use_auto_range)
239262
return
240263

241264
def GetAutoRange(self):
@@ -249,10 +272,10 @@ def GetAutoRange(self):
249272
def UsePTGrid(self, PT_grid:bool=DefaultSettings_NICFD.use_PT_grid):
250273
"""Define fluid data grid in the pressure-temperature space. If not, the fluid data grid is defined in the density-energy space.
251274
252-
:param PT_grid: use pressure-temperature based grid, defaults to DefaultSettings_NICFD.use_PT_grid
275+
:param PT_grid: use pressure-temperature based grid, defaults to False
253276
:type PT_grid: bool, optional
254277
"""
255-
self.__use_PT = PT_grid
278+
self.__use_PT = bool(PT_grid)
256279
return
257280

258281
def GetPTGrid(self):
@@ -265,14 +288,16 @@ def GetPTGrid(self):
265288
return self.__use_PT
266289

267290
def SetTemperatureBounds(self, T_lower:float=DefaultSettings_NICFD.T_min, T_upper:float=DefaultSettings_NICFD.T_max):
268-
"""Set the upper and lower temperature limits for the fluid data grid.
291+
"""Set the upper and lower temperature limits between which fluid data are generated.
269292
270293
:param T_lower: lower temperature limit in Kelvin.
271294
:type T_lower: float
272295
:param T_upper: upper temperature limit in Kelvin.
273296
:type T_upper: float
274297
:raises Exception: if lower temperature limit exceeds upper temperature limit.
275298
"""
299+
if (T_lower <=0 or T_upper <=0):
300+
raise Exception("Temperature values should be positive.")
276301
if (T_lower >= T_upper):
277302
raise Exception("Lower temperature should be lower than upper temperature.")
278303
else:
@@ -1843,7 +1868,7 @@ def AddOutputGroup(self, variable_names_in:list[str]):
18431868
self.__activation_function.append(DefaultSettings_FGM.activation_function)
18441869
return
18451870

1846-
def DefineOutputGroup(self, i_group:int, variable_names_in:list[str]):
1871+
def EditOutputGroup(self, i_group:int, variable_names_in:list[str]):
18471872
"""Re-define the variables in a specific MLP output group.
18481873
18491874
:param i_group: MLP output group index to adapt.
@@ -1864,7 +1889,13 @@ def DefineOutputGroup(self, i_group:int, variable_names_in:list[str]):
18641889
return
18651890

18661891
def RemoveOutputGroup(self, i_group:int):
1867-
if i_group > len(self.__MLP_output_groups):
1892+
"""Remove one of the MLP output groups.
1893+
1894+
:param i_group: MLP output group index to remove.
1895+
:type i_group: int
1896+
:raises Exception: if group index is outside the range of stored groups.
1897+
"""
1898+
if (i_group > len(self.__MLP_output_groups)) or (i_group<0):
18681899
raise Exception("Group not present in MLP outputs.")
18691900
print("Removing output group %i: %s" % (i_group, ",".join(s for s in self.__MLP_output_groups[i_group-1])))
18701901
self.__MLP_output_groups.remove(self.__MLP_output_groups[i_group-1])
@@ -1954,12 +1985,14 @@ def GetWeightsBiases(self, i_group:int=0):
19541985
def SetWeights(self, weights: list[np.ndarray[float]], i_group:int=0):
19551986
self._MLP_weights[i_group] = []
19561987
for w in weights:
1988+
self.__WeightsCheck(w)
19571989
self._MLP_weights[i_group].append(w)
19581990
return
19591991

19601992
def SetBiases(self, biases:list[np.ndarray[float]], i_group:int=0):
19611993
self._MLP_biases[i_group] = []
19621994
for w in biases:
1995+
self.__BiasesCheck(w)
19631996
self._MLP_biases[i_group].append(w)
19641997
return
19651998

Common/Properties.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,8 @@ class DefaultSettings_NICFD(DefaultProperties):
151151
activation_function:str = "exponential"
152152
config_type:str = "EntropicAI"
153153
supported_state_vars:list[str] = ["s","T","p","c2","dTdrho_e","dTde_rho","dpdrho_e","dpde_rho"]
154-
154+
supported_backends:list[str] = ["HEOS","PR", "SRK", "IF97","REFPROP"]
155+
155156
class DefaultSettings_FGM(DefaultProperties):
156157
config_name:str = "config_FGM"
157158

Data_Generation/.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__pycache__/*
1+
__pycache__/*

Data_Processing/.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__pycache__/*
1+
__pycache__/*

0 commit comments

Comments
 (0)