You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Copy file name to clipboardExpand all lines: deepmd/utils/argcheck.py
+9-9Lines changed: 9 additions & 9 deletions
Original file line number
Diff line number
Diff line change
@@ -50,7 +50,7 @@ def type_embedding_args():
50
50
doc_neuron="Number of neurons in each hidden layers of the embedding net. When two layers are of the same size or one layer is twice as large as the previous layer, a skip connection is built."
51
51
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
52
52
doc_seed="Random seed for parameter initialization"
53
-
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
53
+
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
54
54
doc_precision=f"The precision of the embedding net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
55
55
doc_trainable="If the parameters in the embedding net are trainable"
56
56
@@ -162,7 +162,7 @@ def descrpt_se_a_args():
162
162
doc_rcut_smth="Where to start smoothing. For example the 1/r term is smoothed from `rcut` to `rcut_smth`"
163
163
doc_neuron="Number of neurons in each hidden layers of the embedding net. When two layers are of the same size or one layer is twice as large as the previous layer, a skip connection is built."
164
164
doc_axis_neuron="Size of the submatrix of G (embedding matrix)."
165
-
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
165
+
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
166
166
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
167
167
doc_type_one_side=r"If true, the embedding network parameters vary by types of neighbor atoms only, so there will be $N_\text{types}$ sets of embedding network parameters. Otherwise, the embedding network parameters vary by types of centric atoms and types of neighbor atoms, so there will be $N_\text{types}^2$ sets of embedding network parameters."
168
168
doc_precision=f"The precision of the embedding net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
@@ -215,7 +215,7 @@ def descrpt_se_t_args():
215
215
doc_rcut="The cut-off radius."
216
216
doc_rcut_smth="Where to start smoothing. For example the 1/r term is smoothed from `rcut` to `rcut_smth`"
217
217
doc_neuron="Number of neurons in each hidden layers of the embedding net. When two layers are of the same size or one layer is twice as large as the previous layer, a skip connection is built."
218
-
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
218
+
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
219
219
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
220
220
doc_precision=f"The precision of the embedding net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
221
221
doc_trainable="If the parameters in the embedding net are trainable"
@@ -265,7 +265,7 @@ def descrpt_se_r_args():
265
265
doc_rcut="The cut-off radius."
266
266
doc_rcut_smth="Where to start smoothing. For example the 1/r term is smoothed from `rcut` to `rcut_smth`"
267
267
doc_neuron="Number of neurons in each hidden layers of the embedding net. When two layers are of the same size or one layer is twice as large as the previous layer, a skip connection is built."
268
-
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
268
+
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
269
269
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
270
270
doc_type_one_side=r"If true, the embedding network parameters vary by types of neighbor atoms only, so there will be $N_\text{types}$ sets of embedding network parameters. Otherwise, the embedding network parameters vary by types of centric atoms and types of neighbor atoms, so there will be $N_\text{types}^2$ sets of embedding network parameters."
271
271
doc_precision=f"The precision of the embedding net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
@@ -319,7 +319,7 @@ def descrpt_se_atten_args():
319
319
doc_rcut_smth="Where to start smoothing. For example the 1/r term is smoothed from `rcut` to `rcut_smth`"
320
320
doc_neuron="Number of neurons in each hidden layers of the embedding net. When two layers are of the same size or one layer is twice as large as the previous layer, a skip connection is built."
321
321
doc_axis_neuron="Size of the submatrix of G (embedding matrix)."
322
-
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
322
+
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
323
323
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
324
324
doc_type_one_side=r"If true, the embedding network parameters vary by types of neighbor atoms only, so there will be $N_\text{types}$ sets of embedding network parameters. Otherwise, the embedding network parameters vary by types of centric atoms and types of neighbor atoms, so there will be $N_\text{types}^2$ sets of embedding network parameters."
325
325
doc_precision=f"The precision of the embedding net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
@@ -376,7 +376,7 @@ def descrpt_se_a_mask_args():
376
376
377
377
doc_neuron="Number of neurons in each hidden layers of the embedding net. When two layers are of the same size or one layer is twice as large as the previous layer, a skip connection is built."
378
378
doc_axis_neuron="Size of the submatrix of G (embedding matrix)."
379
-
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
379
+
doc_activation_function=f'The activation function in the embedding net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
380
380
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
381
381
doc_type_one_side=r"If true, the embedding network parameters vary by types of neighbor atoms only, so there will be $N_\text{types}$ sets of embedding network parameters. Otherwise, the embedding network parameters vary by types of centric atoms and types of neighbor atoms, so there will be $N_\text{types}^2$ sets of embedding network parameters."
382
382
doc_exclude_types="The excluded pairs of types which have no interaction with each other. For example, `[[0, 1]]` means no interaction between type 0 and type 1."
@@ -441,7 +441,7 @@ def fitting_ener():
441
441
doc_numb_fparam="The dimension of the frame parameter. If set to >0, file `fparam.npy` should be included to provide the input fparams."
442
442
doc_numb_aparam="The dimension of the atomic parameter. If set to >0, file `aparam.npy` should be included to provide the input aparams."
443
443
doc_neuron="The number of neurons in each hidden layers of the fitting net. When two hidden layers are of the same size, a skip connection is built."
444
-
doc_activation_function=f'The activation function in the fitting net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
444
+
doc_activation_function=f'The activation function in the fitting net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
445
445
doc_precision=f"The precision of the fitting net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
446
446
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
447
447
doc_trainable="Whether the parameters in the fitting net are trainable. This option can be\n\n\
@@ -502,7 +502,7 @@ def fitting_ener():
502
502
503
503
def fitting_polar():
504
504
doc_neuron="The number of neurons in each hidden layers of the fitting net. When two hidden layers are of the same size, a skip connection is built."
505
-
doc_activation_function=f'The activation function in the fitting net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
505
+
doc_activation_function=f'The activation function in the fitting net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
506
506
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
507
507
doc_precision=f"The precision of the fitting net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
508
508
doc_scale="The output of the fitting net (polarizability matrix) will be scaled by ``scale``"
@@ -553,7 +553,7 @@ def fitting_polar():
553
553
554
554
def fitting_dipole():
555
555
doc_neuron="The number of neurons in each hidden layers of the fitting net. When two hidden layers are of the same size, a skip connection is built."
556
-
doc_activation_function=f'The activation function in the fitting net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version.'
556
+
doc_activation_function=f'The activation function in the fitting net. Supported activation functions are {list_to_doc(ACTIVATION_FN_DICT.keys())} Note that "gelu" denotes the custom operator version, and "gelu_tf" denotes the TF standard version. If you set "None" or "none" here, no activation function will be used.'
557
557
doc_resnet_dt='Whether to use a "Timestep" in the skip connection'
558
558
doc_precision=f"The precision of the fitting net parameters, supported options are {list_to_doc(PRECISION_DICT.keys())} Default follows the interface precision."
559
559
doc_sel_type="The atom types for which the atomic dipole will be provided. If not set, all types will be selected."
0 commit comments