Skip to content

Commit 9443469

Browse files
Fix flaky TestLoadJIT_3 by relaxing allclose tolerances (#1527)
The 1000->100 linear layer sums 1000 float32 values per output neuron. Under varying CPU load, parallel BLAS libraries change reduction thread scheduling, altering FP summation order and exceeding the tight default tolerances (rtol=1e-5, atol=1e-8). Relax to rtol=1e-4, atol=1e-5, consistent with patterns used elsewhere in the test suite.
1 parent a2a3659 commit 9443469

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

test/TorchSharpTest/TestJIT.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -161,7 +161,7 @@ public void TestLoadJIT_3()
161161

162162
Assert.Equal(new long[] { 10 }, t.shape);
163163
Assert.Equal(torch.float32, t.dtype);
164-
Assert.True(torch.tensor(new float[] { 0.564213157f, -0.04519982f, -0.005117342f, 0.395530462f, -0.3780813f, -0.004734449f, -0.3221216f, -0.289159119f, 0.268511474f, 0.180702567f }).allclose(t));
164+
Assert.True(torch.tensor(new float[] { 0.564213157f, -0.04519982f, -0.005117342f, 0.395530462f, -0.3780813f, -0.004734449f, -0.3221216f, -0.289159119f, 0.268511474f, 0.180702567f }).allclose(t, rtol: 1e-4, atol: 1e-5));
165165

166166
Assert.Throws<System.Runtime.InteropServices.ExternalException>(() => m.call(torch.ones(100)));
167167
}

0 commit comments

Comments (0)