Skip to content

Commit 78944df

Browse files
committed
Fix fmt
1 parent d0e238b commit 78944df

File tree

1 file changed

+2
-2
lines changed
  • src/machine_learning/optimization

1 file changed

+2
-2
lines changed

src/machine_learning/optimization/adamw.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -142,7 +142,7 @@ mod tests {
142142
#[test]
143143
fn test_adamw_step_iteratively_until_convergence() {
144144
let gradients = vec![1.0, 2.0, 3.0, 4.0];
145-
145+
146146
// High learning rate and weight decay to force massive movement quickly
147147
let mut optimizer = AdamW::new(Some(0.1), None, None, Some(0.01), 4);
148148
let mut model_params = vec![5.0; 4];
@@ -151,7 +151,7 @@ mod tests {
151151
optimizer.step(&mut model_params, &gradients);
152152
}
153153

154-
// Because the gradient is constantly pushing positive, and the weight decay
154+
// Because the gradient is constantly pushing positive, and the weight decay
155155
// is pushing towards zero, the parameters should be pushed negatively from 5.0
156156
// and eventually find a stable equilibrium.
157157
assert!(model_params[0] < 5.0);

0 commit comments

Comments (0)