Skip to content

Commit a65713a

Browse files
committed
Add derivative check for Jacobian of residual
1 parent 0d76c9c commit a65713a

1 file changed

Lines changed: 43 additions & 0 deletions

File tree

test/runtests.jl

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,3 +2,46 @@ using BundleAdjustmentModels, DataFrames, LinearAlgebra, NLPModels, Pkg, Test
22

33
include("testBundleAdjustmentModels.jl")
44
include("testBundleAdjustmentAllocations.jl")
5+
6+
# https://github.com/JuliaSmoothOptimizers/NLPModelsTest.jl/blob/main/src/dercheck.jl#L43
7+
"""
    jacobian_residual_check(nlp; x = nlp.meta.x0, atol = 1.0e-6, rtol = 1.0e-4)

Compare the analytical Jacobian of the residual, `jac_residual(nlp, x)`,
against second-order centered finite differences of `residual!`.

Return a `Dict{Tuple{Int, Int}, Float64}` mapping `(equation, variable)`
index pairs to the absolute error, for every Jacobian entry whose error
exceeds `atol + rtol * |finite-difference estimate|`. An empty dictionary
means the Jacobian passed the check.
"""
function jacobian_residual_check(
  nlp::AbstractNLSModel;
  x::AbstractVector = nlp.meta.x0,
  atol::Float64 = 1.0e-6,
  rtol::Float64 = 1.0e-4,
)

  # Fast exit if there are no residual equations.
  J_errs = Dict{Tuple{Int, Int}, Float64}()
  nlp.nls_meta.nequ > 0 || return J_errs

  # Optimal-ish step for second-order centered finite differences.
  step = (eps(Float64) / 3)^(1 / 3)

  # Analytical Jacobian of the residual to be verified.
  J = jac_residual(nlp, x)
  # Preallocate the perturbed points and residual buffers once, instead of
  # allocating `x + h` and `x - h` on every loop iteration.
  xph = copy(x)
  xmh = copy(x)
  cxph = zeros(nlp.nls_meta.nequ)
  cxmh = zeros(nlp.nls_meta.nequ)
  # Differentiate all residual equations with respect to each variable in turn.
  for i = 1:(nlp.meta.nvar)
    xph[i] = x[i] + step
    xmh[i] = x[i] - step
    residual!(nlp, xph, cxph)
    residual!(nlp, xmh, cxmh)
    # Centered finite-difference estimate of column i of the Jacobian.
    dcdxi = (cxph - cxmh) / 2 / step
    for j = 1:(nlp.nls_meta.nequ)
      err = abs(dcdxi[j] - J[j, i])
      if err > atol + rtol * abs(dcdxi[j])
        J_errs[(j, i)] = err
      end
    end
    # Restore coordinate i before moving to the next variable.
    xph[i] = x[i]
    xmh[i] = x[i]
  end
  return J_errs
end
42+
43+
@testset "Test derivative Jacobian of residual" begin
  # Small Bundle Adjustment problem used as a representative NLS model.
  model = BundleAdjustmentModel("problem-49-7776-pre")
  # Evaluation point with alternating entries +10, -10, +10, ...
  x = [isodd(i) ? 10.0 : -10.0 for i = 1:model.meta.nvar]
  # No entry of the Jacobian should fail the finite-difference check.
  @test isempty(jacobian_residual_check(model, x = x))
end

0 commit comments

Comments
 (0)