Skip to content

Commit 2f7fcc7

Browse files
committed
WIP obj sensib fallbacks for conic and quad
1 parent 1b3cfe1 commit 2f7fcc7

5 files changed

Lines changed: 230 additions & 10 deletions

File tree

src/ConicProgram/ConicProgram.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -451,14 +451,14 @@ function MOI.get(
451451
end
452452

453453
function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
454-
return error(
455-
"ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
454+
return throw(
455+
MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
456456
)
457457
end
458458

459459
function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
460-
return error(
461-
"ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
460+
return throw(
461+
MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
462462
)
463463
end
464464

src/QuadraticProgram/QuadraticProgram.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -502,14 +502,14 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
502502
end
503503

504504
function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
505-
return error(
506-
"ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
505+
return throw(
506+
MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
507507
)
508508
end
509509

510510
function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
511-
return error(
512-
"ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
511+
return throw(
512+
MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
513513
)
514514
end
515515

src/jump_wrapper.jl

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,3 +143,25 @@ Get the value of a variable output sensitivity for forward mode.
143143
function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
144144
return MOI.get(model, ForwardVariablePrimal(), variable)
145145
end
146+
147+
148+
"""
149+
set_reverse_objective(model::JuMP.Model, value::Number)
150+
151+
Set the value of the objective input sensitivity for reverse mode.
152+
"""
153+
function set_reverse_objective(
154+
model::JuMP.Model,
155+
value::Number,
156+
)
157+
return MOI.set(model, ReverseObjectiveSensitivity(), value)
158+
end
159+
160+
"""
161+
get_forward_objective(model::JuMP.Model)
162+
163+
Get the value of the objective output sensitivity for forward mode.
164+
"""
165+
function get_forward_objective(model::JuMP.Model)
166+
return MOI.get(model, ForwardObjectiveSensitivity())
167+
end

src/moi_wrapper.jl

Lines changed: 103 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -574,11 +574,61 @@ function reverse_differentiate!(model::Optimizer)
574574
MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value)
575575
end
576576
if !iszero(model.input_cache.dobj)
577-
MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
577+
if !isempty(model.input_cache.dx)
578+
error(
579+
"Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.",
580+
)
581+
end
582+
try
583+
MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
584+
catch e
585+
if e isa MOI.UnsupportedAttribute
586+
_fallback_set_reverse_objective_sensitivity(model, model.input_cache.dobj)
587+
else
588+
rethrow(e)
589+
end
590+
end
578591
end
579592
return reverse_differentiate!(diff)
580593
end
581594

595+
function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)
596+
diff = _diff(model)
597+
obj_type = MOI.get(
598+
model,
599+
MOI.ObjectiveFunctionType(),
600+
)
601+
obj_func = MOI.get(
602+
model,
603+
MOI.ObjectiveFunction{obj_type}(),
604+
)
605+
for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func)
606+
df_dx = MOI.Nonlinear.SymbolicAD.simplify!(
607+
MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi),
608+
)
609+
if iszero(df_dx)
610+
continue
611+
end
612+
dd = 0.0
613+
if df_dx isa Number
614+
dd = df_dx * val
615+
elseif df_dx isa MOI.ScalarAffineFunction{Float64}
616+
for term in df_dx.terms
617+
xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable)
618+
dd += term.coefficient * xj_val * val
619+
end
620+
dd += df_dx.constant * val
621+
else
622+
error(
623+
"Cannot compute reverse objective sensitivity fallback: " *
624+
"unsupported derivative found.",
625+
)
626+
end
627+
MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], dd)
628+
end
629+
return
630+
end
631+
582632
function _copy_forward_in_constraint(diff, index_map, con_map, constraints)
583633
for (index, value) in constraints
584634
MOI.set(
@@ -830,7 +880,58 @@ function MOI.get(
830880
end
831881

832882
function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
833-
return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr)
883+
diff_model = _checked_diff(model, attr, :forward_differentiate!)
884+
val = 0.0
885+
try
886+
val = MOI.get(diff_model, attr)
887+
catch e
888+
if e isa MOI.UnsupportedAttribute
889+
val = _fallback_get_forward_objective_sensitivity(model)
890+
else
891+
rethrow(e)
892+
end
893+
end
894+
return val
895+
end
896+
897+
function _fallback_get_forward_objective_sensitivity(model::Optimizer)
898+
ret = 0.0
899+
obj_type = MOI.get(
900+
model,
901+
MOI.ObjectiveFunctionType(),
902+
)
903+
obj_func = MOI.get(
904+
model,
905+
MOI.ObjectiveFunction{obj_type}(),
906+
)
907+
for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func)
908+
df_dx = MOI.Nonlinear.SymbolicAD.simplify!(
909+
MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi),
910+
)
911+
if iszero(df_dx)
912+
continue
913+
end
914+
dx_dp = MOI.get(
915+
model,
916+
ForwardVariablePrimal(),
917+
xi,
918+
)
919+
if df_dx isa Number
920+
ret += df_dx * dx_dp
921+
elseif df_dx isa MOI.ScalarAffineFunction{Float64}
922+
for term in df_dx.terms
923+
xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable)
924+
ret += term.coefficient * xj_val * dx_dp
925+
end
926+
ret += df_dx.constant * dx_dp
927+
else
928+
error(
929+
"Cannot compute forward objective sensitivity fallback: " *
930+
"unsupported derivative found.",
931+
)
932+
end
933+
end
934+
return ret
834935
end
835936

836937
function MOI.supports(

test/jump_wrapper.jl

Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,103 @@ function runtests()
2929
return
3030
end
3131

32+
function test_obj()
33+
34+
for (MODEL, SOLVER) in [
35+
(DiffOpt.diff_model, HiGHS.Optimizer),
36+
# (DiffOpt.diff_model, SCS.Optimizer),
37+
# (DiffOpt.diff_model, Ipopt.Optimizer),
38+
# (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
39+
# (DiffOpt.quadratic_diff_model, SCS.Optimizer),
40+
# (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
41+
# (DiffOpt.conic_diff_model, HiGHS.Optimizer),
42+
# (DiffOpt.conic_diff_model, SCS.Optimizer),
43+
# (DiffOpt.conic_diff_model, Ipopt.Optimizer),
44+
# (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer),
45+
# (DiffOpt.nonlinear_diff_model, SCS.Optimizer),
46+
# (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
47+
],
48+
# ineq in [true, false],
49+
# _min in [true, false],
50+
# flip in [true, false],
51+
with_bridge_type in [Float64, nothing]
52+
53+
if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
54+
continue
55+
end
56+
57+
58+
MODEL = DiffOpt.diff_model
59+
SOLVER = HiGHS.Optimizer
60+
with_bridge_type = Float64
61+
ineq = false
62+
_min = true
63+
flip = false
64+
65+
@testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(_min ? "Min" : "Max"), $(flip ? "geq" : "leq") bridge:$with_bridge_type" begin
66+
model = MODEL(SOLVER; with_bridge_type)
67+
set_silent(model)
68+
69+
p_val = 4.0
70+
pc_val = 2.0
71+
@variable(model, x)
72+
@variable(model, p in Parameter(p_val))
73+
@variable(model, pc in Parameter(pc_val))
74+
# if ineq
75+
# if !flip
76+
# cons = @constraint(model, con, pc * x >= 3 * p)
77+
# else
78+
# cons = @constraint(model, con, pc * x <= 3 * p)
79+
# end
80+
# else
81+
cons = @constraint(model, con, pc * x == 3 * p)
82+
# end
83+
# sign = flip ? -1 : 1
84+
# if _min
85+
# @objective(model, Min, 2x * sign)
86+
# else
87+
# @objective(model, Max, -2x * sign)
88+
# end
89+
90+
for obj_coef in [-3, 2, 5]
91+
@objective(model, Min, obj_coef * x)
92+
93+
optimize!(model)
94+
@test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL
95+
96+
DiffOpt.empty_input_sensitivities!(model)
97+
direction_obj = 2.0
98+
DiffOpt.set_reverse_objective(model, direction_obj)
99+
DiffOpt.reverse_differentiate!(model)
100+
@test DiffOpt.get_reverse_parameter(model, p) ≈ obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL
101+
@test DiffOpt.get_reverse_parameter(model, pc) ≈ -obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL
102+
103+
DiffOpt.empty_input_sensitivities!(model)
104+
direction_p = 3.0
105+
DiffOpt.set_forward_parameter(model, p, direction_p)
106+
DiffOpt.forward_differentiate!(model)
107+
@test DiffOpt.get_forward_objective(model) ≈ obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL
108+
109+
# stop differentiating with respect to p
110+
DiffOpt.empty_input_sensitivities!(model)
111+
# differentiate w.r.t. pc
112+
direction_pc = 10.0
113+
DiffOpt.set_forward_parameter(model, pc, direction_pc)
114+
DiffOpt.forward_differentiate!(model)
115+
@test DiffOpt.get_forward_objective(model) ≈
116+
-obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL
117+
118+
end
119+
120+
121+
end
122+
end
123+
124+
return
125+
end
126+
127+
# TODO test quadratic obj
128+
32129
function test_jump_api()
33130
for (MODEL, SOLVER) in [
34131
(DiffOpt.diff_model, HiGHS.Optimizer),

0 commit comments

Comments
 (0)