Skip to content

Commit 97a76df

Browse files
committed
format and try to fix tests
1 parent 47509b6 commit 97a76df

5 files changed

Lines changed: 54 additions & 43 deletions

File tree

src/ADNLPProblems/bard.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,8 @@ end
88
function bard(::Val{:nlp}; n::Int = default_nvar, type::Type{T} = Float64, kwargs...) where {T}
99
y = Rational{Int}[0.14 0.18 0.22 0.25 0.29 0.32 0.35 0.39 0.37 0.58 0.73 0.16 1.34 2.10 4.39]
1010
function f(x)
11-
return 1 // 2 * sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
11+
return 1 // 2 *
12+
sum((y[i] - (x[1] + i / ((16 - i) * x[2] + min(i, 16 - i) * x[3])))^2 for i = 1:15)
1213
end
1314
x0 = ones(T, 3)
1415
return ADNLPModels.ADNLPModel(f, x0, name = "bard"; kwargs...)

src/ADNLPProblems/watson.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,8 @@ function watson(::Val{:nlp}; n::Int = default_nvar, type::Type{T} = Float64, kwa
2020
sum((j - 1) * x[j] * x[1]^(j - 2) for j = 2:n) -
2121
sum(x[j] * x[1]^(j - 1) for j = 1:n)^2 - 1
2222
)^2 +
23-
1 // 2 * (
23+
1 // 2 *
24+
(
2425
sum((j - 1) * x[j] * (x[2] - x[1]^2 - 1)^(j - 2) for j = 2:n) -
2526
sum(x[j] * (x[2] - x[1]^2 - 1)^(j - 1) for j = 1:n)^2 - 1
2627
)^2

src/PureJuMP/watson.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,8 @@ function watson(args...; n::Int = default_nvar, kwargs...)
3737
(
3838
sum((j - 1) * x[j] * x[1]^(j - 2) for j = 2:n) - sum(x[j] * x[1]^(j - 1) for j = 1:n)^2 - 1
3939
)^2 +
40-
0.5 * (
40+
0.5 *
41+
(
4142
sum((j - 1) * x[j] * (x[2] - x[1]^2 - 1)^(j - 2) for j = 2:n) -
4243
sum(x[j] * (x[2] - x[1]^2 - 1)^(j - 1) for j = 1:n)^2 - 1
4344
)^2

test/runtests.jl

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,8 @@ addprocs(np - 1)
1414
[n for n in names(mod) if isdefined(mod, n)]
1515
end
1616

17-
@everywhere const list_problems = setdiff(union(defined_names(ADNLPProblems), defined_names(PureJuMP)), [:PureJuMP, :ADNLPProblems])
17+
@everywhere const list_problems =
18+
setdiff(union(defined_names(ADNLPProblems), defined_names(PureJuMP)), [:PureJuMP, :ADNLPProblems])
1819

1920
@testset "Test that all problems have a meta" begin
2021
@test sort(list_problems) == sort(Symbol.(OptimizationProblems.meta[!, :name]))
@@ -25,7 +26,8 @@ end
2526
# TODO: tests are limited for JuMP-only problems
2627
@everywhere const list_problems_not_ADNLPProblems =
2728
Symbol[:catmix, :gasoil, :glider, :methanol, :minsurf, :pinene, :rocket, :steering, :torsion]
28-
@everywhere const list_problems_ADNLPProblems = setdiff(list_problems, list_problems_not_ADNLPProblems)
29+
@everywhere const list_problems_ADNLPProblems =
30+
setdiff(list_problems, list_problems_not_ADNLPProblems)
2931
@everywhere const list_problems_not_PureJuMP = Symbol[]
3032
@everywhere const list_problems_PureJuMP = setdiff(list_problems, list_problems_not_PureJuMP)
3133

@@ -48,7 +50,7 @@ end
4850
error("Problem $(prob) is not defined in $mod on pid $(myid()).")
4951
end
5052
ctor = getfield(mod, prob)
51-
return MathOptNLPModel(ctor(;kwargs...); name = "$prob")
53+
return MathOptNLPModel(ctor(; kwargs...); name = "$prob")
5254
end
5355

5456
@everywhere function make_ad_nlp(prob::Symbol; kwargs...)
@@ -85,7 +87,8 @@ include("test-in-place-residual.jl")
8587
@test !isnothing(obj(nlp_ad, nlp_ad.meta.x0))
8688
end
8789

88-
if pb in meta[(meta.contype .== :quadratic) .| (meta.contype .== :general), :name]
90+
if (typeof(nlp_ad) <: ADNLPModels.AbstractADNLPModel) &&
91+
(pb in meta[(meta.contype .== :quadratic) .| (meta.contype .== :general), :name])
8992
@testset "Test In-place Nonlinear Constraints for AD-$prob" begin
9093
test_in_place_constraints(prob, nlp_ad)
9194
end

test/test-in-place-residual.jl

Lines changed: 41 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,41 @@
1-
@everywhere function test_in_place_residual(prob::Symbol)
2-
nlp = make_ad_nlp(prob; use_nls = false)
3-
@test typeof(nlp) <: ADNLPModels.ADNLPModel
4-
nls = make_ad_nlp(prob; use_nls = true)
5-
@test typeof(nls) <: ADNLPModels.ADNLSModel
6-
return test_in_place_residual(prob, nlp, nls)
7-
end
8-
9-
@everywhere function test_in_place_residual(prob::Symbol, nlp::AbstractNLPModel, nls::AbstractNLSModel)
10-
@testset "Test in-place residual $prob" begin
11-
x = nls.meta.x0
12-
Fx = similar(x, nls.nls_meta.nequ)
13-
pb = String(prob)
14-
if VERSION ≥ v"1.7" && !occursin("palmer", pb) && (pb != "watson") # palmer residual allocate
15-
@allocated residual!(nls, x, Fx)
16-
@test (@allocated residual!(nls, x, Fx)) == 0
17-
end
18-
m = OptimizationProblems.eval(Meta.parse("get_$(prob)_nls_nequ"))()
19-
@test nls.nls_meta.nequ == m
20-
end
21-
22-
@testset "Compare NLS with NLP $prob: x0 and obj are the same." begin
23-
x0 = nlp.meta.x0
24-
@test x0 == nls.meta.x0
25-
nlp_fx = obj(nlp, x0)
26-
nls_fx = obj(nls, x0)
27-
are_almost_same = (nlp_fx ≈ nls_fx) | (nlp_fx ≈ 2 * nls_fx)
28-
if !(are_almost_same)
29-
@info "$prob : NLS $(nls_fx) ≈ NLP $(nlp_fx)"
30-
end
31-
@test are_almost_same
32-
end
33-
end
34-
35-
nls_name_list = intersect(Symbol.(meta[meta.objtype .== :least_squares, :name]), list_problems_ADNLPProblems)
36-
pmap(test_in_place_residual, nls_name_list)
1+
@everywhere function test_in_place_residual(prob::Symbol)
2+
nlp = make_ad_nlp(prob; use_nls = false)
3+
@test typeof(nlp) <: ADNLPModels.ADNLPModel
4+
nls = make_ad_nlp(prob; use_nls = true)
5+
@test typeof(nls) <: ADNLPModels.ADNLSModel
6+
return test_in_place_residual(prob, nlp, nls)
7+
end
8+
9+
@everywhere function test_in_place_residual(
10+
prob::Symbol,
11+
nlp::AbstractNLPModel,
12+
nls::AbstractNLSModel,
13+
)
14+
@testset "Test in-place residual $prob" begin
15+
x = nls.meta.x0
16+
Fx = similar(x, nls.nls_meta.nequ)
17+
pb = String(prob)
18+
if VERSION ≥ v"1.7" && !occursin("palmer", pb) && (pb != "watson") # palmer residual allocate
19+
@allocated residual!(nls, x, Fx)
20+
@test (@allocated residual!(nls, x, Fx)) == 0
21+
end
22+
m = OptimizationProblems.eval(Meta.parse("get_$(prob)_nls_nequ"))()
23+
@test nls.nls_meta.nequ == m
24+
end
25+
26+
@testset "Compare NLS with NLP $prob: x0 and obj are the same." begin
27+
x0 = nlp.meta.x0
28+
@test x0 == nls.meta.x0
29+
nlp_fx = obj(nlp, x0)
30+
nls_fx = obj(nls, x0)
31+
are_almost_same = (nlp_fx ≈ nls_fx) | (nlp_fx ≈ 2 * nls_fx)
32+
if !(are_almost_same)
33+
@info "$prob : NLS $(nls_fx) ≈ NLP $(nlp_fx)"
34+
end
35+
@test are_almost_same
36+
end
37+
end
38+
39+
nls_name_list =
40+
intersect(Symbol.(meta[meta.objtype .== :least_squares, :name]), list_problems_ADNLPProblems)
41+
pmap(test_in_place_residual, nls_name_list)

0 commit comments

Comments
 (0)