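# test-utils.jl: helpers shared by the OptimizationProblems.jl test suite.
# Assumed context: the including script (e.g. runtests.jl) loads Test,
# Distributed, NLPModels, ADNLPModels, NLPModelsJuMP and OptimizationProblems
# on all workers, and defines the `make_ad_nlp` helper used below.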
@everywhere const ndef = OptimizationProblems.default_nvar
@test ndef == OptimizationProblems.PureJuMP.default_nvar
@test ndef == OptimizationProblems.ADNLPProblems.default_nvar
@everywhere const test_nvar = round(Int, ndef / 2) # non-default size used by the sanity checks
@everywhere meta = OptimizationProblems.meta
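
# Sanity checks on the `get_<prob>_*` accessors: each value must match the
# problem's meta entry or the actual model, unless the meta flags the
# dimension as variable (`variable_nvar` / `variable_ncon`).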
@everywhere function meta_sanity_check(prob::Symbol, nlp::AbstractNLPModel)
  meta = OptimizationProblems.eval(Symbol(prob, :_meta)) # per-problem meta (shadows the global meta table)
  getnvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))(n = test_nvar)
  @test getnvar == meta[:nvar] || meta[:variable_nvar]
  getncon = OptimizationProblems.eval(Symbol(:get_, prob, :_ncon))(n = test_nvar)
  @test getncon == meta[:ncon] || meta[:variable_ncon]
  getnlin = OptimizationProblems.eval(Symbol(:get_, prob, :_nlin))(n = test_nvar)
  @test getnlin == nlp.meta.nlin || meta[:variable_ncon]
  getnnln = OptimizationProblems.eval(Symbol(:get_, prob, :_nnln))(n = test_nvar)
  @test getnnln == nlp.meta.nnln || meta[:variable_ncon]
  getnequ = OptimizationProblems.eval(Symbol(:get_, prob, :_nequ))(n = test_nvar)
  @test getnequ == length(get_jfix(nlp)) || meta[:variable_ncon]
  getnineq = OptimizationProblems.eval(Symbol(:get_, prob, :_nineq))(n = test_nvar)
  @test getnineq == (get_ncon(nlp) - length(get_jfix(nlp))) || meta[:variable_ncon]
  @test meta[:best_known_lower_bound] <= meta[:best_known_upper_bound]
  @test meta[:minimize] == get_minimize(nlp)
  @test meta[:has_equalities_only] == (length(get_jfix(nlp)) == get_ncon(nlp) > 0)
  @test meta[:has_inequalities_only] == (get_ncon(nlp) > 0 && length(get_jfix(nlp)) == 0)
  @test meta[:has_bounds] == (length(get_ifree(nlp)) < get_nvar(nlp))
  @test meta[:has_fixed_variables] == !isempty(get_ifix(nlp))
end
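
# Check that the nonlinear constraints of `prob` evaluate in place without
# allocating (on Julia ≥ 1.7) and that `get_<prob>_nnln` reports their number.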
@everywhere function test_in_place_constraints(prob::Symbol)
  nlp = OptimizationProblems.ADNLPProblems.eval(prob)()
  return test_in_place_constraints(prob, nlp)
end
@everywhere function test_in_place_constraints(prob::Symbol, nlp::AbstractNLPModel)
  x = get_x0(nlp)
  nnln = nlp.meta.nnln
  @test nnln > 0
  cx = similar(x, nnln)
  if VERSION ≥ v"1.7"
    @allocated cons_nln!(nlp, x, cx) # warm-up so compilation is not counted below
    @test (@allocated cons_nln!(nlp, x, cx)) == 0
  end
  m = OptimizationProblems.eval(Symbol(:get_, prob, :_nnln))()
  @test nnln == m
end
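
# Build the PureJuMP and ADNLPProblems versions of `prob` and verify that they
# describe the same optimization problem.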
@everywhere function test_compatibility(prob::Symbol, ndef::Integer = ndef)
  prob_fn = OptimizationProblems.PureJuMP.eval(prob)
  model = prob_fn(n = ndef)
  nlp_jump = MathOptNLPModel(model)
  # exercise the size accessors (their values are not needed below)
  nvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))()
  ncon = OptimizationProblems.eval(Symbol(:get_, prob, :_ncon))()
  nlp_ad = make_ad_nlp(prob)
  return test_compatibility(prob, nlp_jump, nlp_ad, ndef)
end
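
# Core comparison: dimensions, bounds, starting points, objective and
# constraint values of the JuMP and AD formulations must (approximately) agree.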
@everywhere function test_compatibility(
  prob::Symbol,
  nlp_jump,
  nlp_ad::ADNLPModels.ADModel,
  ndef::Integer = ndef,
)
  @test nlp_jump.meta.nvar == nlp_ad.meta.nvar
  @test nlp_jump.meta.x0 == nlp_ad.meta.x0
  @test nlp_jump.meta.ncon == nlp_ad.meta.ncon
  @test nlp_jump.meta.lvar == nlp_ad.meta.lvar
  @test nlp_jump.meta.uvar == nlp_ad.meta.uvar
  x1 = nlp_ad.meta.x0
  x2 = nlp_ad.meta.x0 .+ 0.01
  n0 = max(abs(obj(nlp_ad, nlp_ad.meta.x0)), 1) # objective scale, makes the tolerances relative
  obj_tol = 1e-10
  if !(prob in [:triangle_pacman, :triangle_deer]) # precision issue
    if isnan(n0)
      @test isnan(obj(nlp_jump, x1))
    else
      @test isapprox(obj(nlp_ad, x1), obj(nlp_jump, x1), atol = obj_tol * n0)
    end
    n0 = max(abs(obj(nlp_ad, x2)), 1)
    if isnan(n0)
      @test isnan(obj(nlp_jump, x2))
    else
      @test isapprox(obj(nlp_ad, x2), obj(nlp_jump, x2), atol = obj_tol * n0)
    end
  end
  grad(nlp_ad, x1) # just test that it runs
  if nlp_ad.meta.ncon > 0
    cons_tol = 1e-10
    @test nlp_ad.meta.lcon ≈ nlp_jump.meta.lcon
    @test nlp_ad.meta.ucon ≈ nlp_jump.meta.ucon
    @test all(isapprox.(cons(nlp_ad, x1), cons(nlp_jump, x1), atol = cons_tol * n0))
    @test all(isapprox.(cons(nlp_ad, x2), cons(nlp_jump, x2), atol = cons_tol * n0))
    @test nlp_jump.meta.lin == nlp_ad.meta.lin
  end
  meta_sanity_check(prob, nlp_ad)
end
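
# Instantiate `prob` in each precision of `list_types` and check that basic
# evaluations stay in that type.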
@everywhere function test_multi_precision(
  prob::Symbol,
  nlp_ad::ADNLPModels.ADNLPModel{T};
  list_types = [Float32, Float64],
) where {T}
  test_multi_precision(T, nlp_ad)
  test_multi_precision(prob, list_types = setdiff(list_types, [T]))
end
@everywhere function test_multi_precision(prob::Symbol; list_types = [Float32, Float64])
  nvar = OptimizationProblems.eval(Symbol(:get_, prob, :_nvar))()
  ncon = OptimizationProblems.eval(Symbol(:get_, prob, :_ncon))()
  for T in list_types
    nlp = make_ad_nlp(prob; type = T)
    test_multi_precision(T, nlp)
  end
end
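
# Evaluations at the starting point must return values of element type `T`.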
@everywhere function test_multi_precision(::Type{T}, nlp::AbstractNLPModel) where {T}
  x0 = get_x0(nlp)
  @test eltype(x0) == T
  @test typeof(obj(nlp, x0)) == T
  if get_ncon(nlp) > 0
    @test eltype(cons(nlp, x0)) == T
  end
end