Skip to content

Commit ae803ef

Browse files
authored
Objective sensitivity for Conic and Quadratic (#330)
* WIP obj sensib fallbacks for conic and quad * adjust tests * format * add quad tests * fix tests * add grad tests * fixes * fix cov * add comments
1 parent 86ab229 commit ae803ef

File tree

8 files changed

+494
-81
lines changed

8 files changed

+494
-81
lines changed

src/ConicProgram/ConicProgram.jl

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -450,15 +450,23 @@ function MOI.get(
450450
return MOI.get(model.model, attr, ci)
451451
end
452452

453+
"""
454+
Method not supported for `DiffOpt.ConicProgram.Model` directly.
455+
However, a fallback is provided in `DiffOpt`.
456+
"""
453457
function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
454-
return error(
455-
"ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
458+
return throw(
459+
MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
456460
)
457461
end
458462

463+
"""
464+
Method not supported for `DiffOpt.ConicProgram.Model` directly.
465+
However, a fallback is provided in `DiffOpt`.
466+
"""
459467
function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
460-
return error(
461-
"ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
468+
return throw(
469+
MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
462470
)
463471
end
464472

src/QuadraticProgram/QuadraticProgram.jl

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -501,15 +501,23 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
501501
return model.linear_solver = linear_solver
502502
end
503503

504+
"""
505+
Method not supported for `DiffOpt.QuadraticProgram.Model` directly.
506+
However, a fallback is provided in `DiffOpt`.
507+
"""
504508
function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
505-
return error(
506-
"ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
509+
return throw(
510+
MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
507511
)
508512
end
509513

514+
"""
515+
Method not supported for `DiffOpt.QuadraticProgram.Model` directly.
516+
However, a fallback is provided in `DiffOpt`.
517+
"""
510518
function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
511-
return error(
512-
"ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
519+
return throw(
520+
MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
513521
)
514522
end
515523

src/jump_wrapper.jl

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,3 +143,21 @@ Get the value of a variable output sensitivity for forward mode.
143143
function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
144144
return MOI.get(model, ForwardVariablePrimal(), variable)
145145
end
146+
147+
"""
148+
set_reverse_objective(model::JuMP.Model, value::Number)
149+
150+
Set the value of the objective input sensitivity for reverse mode.
151+
"""
152+
function set_reverse_objective(model::JuMP.Model, value::Number)
153+
return MOI.set(model, ReverseObjectiveSensitivity(), value)
154+
end
155+
156+
"""
157+
get_forward_objective(model::JuMP.Model)
158+
159+
Get the value of the objective output sensitivity for forward mode.
160+
"""
161+
function get_forward_objective(model::JuMP.Model)
162+
return MOI.get(model, ForwardObjectiveSensitivity())
163+
end

src/moi_wrapper.jl

Lines changed: 94 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -574,11 +574,80 @@ function reverse_differentiate!(model::Optimizer)
574574
MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value)
575575
end
576576
if !iszero(model.input_cache.dobj)
577-
MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
577+
try
578+
MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
579+
catch e
580+
if e isa MOI.UnsupportedAttribute
581+
_fallback_set_reverse_objective_sensitivity(
582+
model,
583+
model.input_cache.dobj,
584+
)
585+
else
586+
rethrow(e)
587+
end
588+
end
578589
end
579590
return reverse_differentiate!(diff)
580591
end
581592

593+
# Gradient evaluation functions for objective sensitivity fallbacks
594+
function _eval_gradient(::Optimizer, ::Number)
595+
return Dict{MOI.VariableIndex,Float64}()
596+
end
597+
598+
function _eval_gradient(::Optimizer, f::MOI.VariableIndex)
599+
return Dict{MOI.VariableIndex,Float64}(f => 1.0)
600+
end
601+
602+
function _eval_gradient(::Optimizer, f::MOI.ScalarAffineFunction{Float64})
603+
grad = Dict{MOI.VariableIndex,Float64}()
604+
for term in f.terms
605+
grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient
606+
end
607+
return grad
608+
end
609+
610+
function _eval_gradient(
611+
model::Optimizer,
612+
f::MOI.ScalarQuadraticFunction{Float64},
613+
)
614+
grad = Dict{MOI.VariableIndex,Float64}()
615+
for term in f.affine_terms
616+
grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient
617+
end
618+
# MOI convention: function is 0.5 * x' * Q * x, so derivative of diagonal
619+
# term 0.5 * coef * xi^2 is coef * xi (not 2 * coef * xi)
620+
for term in f.quadratic_terms
621+
xi, xj = term.variable_1, term.variable_2
622+
coef = term.coefficient
623+
xi_val = MOI.get(model, MOI.VariablePrimal(), xi)
624+
xj_val = MOI.get(model, MOI.VariablePrimal(), xj)
625+
if xi == xj
626+
grad[xi] = get(grad, xi, 0.0) + coef * xi_val
627+
else
628+
grad[xi] = get(grad, xi, 0.0) + coef * xj_val
629+
grad[xj] = get(grad, xj, 0.0) + coef * xi_val
630+
end
631+
end
632+
return grad
633+
end
634+
635+
function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)
636+
diff = _diff(model)
637+
obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
638+
obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
639+
grad = _eval_gradient(model, obj_func)
640+
for (xi, df_dxi) in grad
641+
MOI.set(
642+
diff,
643+
ReverseVariablePrimal(),
644+
model.index_map[xi],
645+
df_dxi * val,
646+
)
647+
end
648+
return
649+
end
650+
582651
function _copy_forward_in_constraint(diff, index_map, con_map, constraints)
583652
for (index, value) in constraints
584653
MOI.set(
@@ -830,7 +899,30 @@ function MOI.get(
830899
end
831900

832901
function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
833-
return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr)
902+
diff_model = _checked_diff(model, attr, :forward_differentiate!)
903+
val = 0.0
904+
try
905+
val = MOI.get(diff_model, attr)
906+
catch e
907+
if e isa MOI.UnsupportedAttribute
908+
val = _fallback_get_forward_objective_sensitivity(model)
909+
else
910+
rethrow(e)
911+
end
912+
end
913+
return val
914+
end
915+
916+
function _fallback_get_forward_objective_sensitivity(model::Optimizer)
917+
obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
918+
obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
919+
grad = _eval_gradient(model, obj_func)
920+
ret = 0.0
921+
for (xi, df_dxi) in grad
922+
dx_dp = MOI.get(model, ForwardVariablePrimal(), xi)
923+
ret += df_dxi * dx_dp
924+
end
925+
return ret
834926
end
835927

836928
function MOI.supports(

test/conic_program.jl

Lines changed: 0 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -841,39 +841,6 @@ function test_jump_psd_cone_with_parameter_pv_v_pv()
841841
@test dx ≈ 0.0 atol = 1e-4 rtol = 1e-4
842842
end
843843

844-
function test_ObjectiveSensitivity()
845-
model = DiffOpt.conic_diff_model(SCS.Optimizer)
846-
@variable(model, x)
847-
@variable(model, p in MOI.Parameter(1.0))
848-
@constraint(
849-
model,
850-
con,
851-
[p * x, (2 * x - 3), p * 3 * x] in
852-
MOI.PositiveSemidefiniteConeTriangle(2)
853-
)
854-
@objective(model, Min, x)
855-
optimize!(model)
856-
direction_p = 2.0
857-
DiffOpt.set_forward_parameter(model, p, direction_p)
858-
859-
DiffOpt.forward_differentiate!(model)
860-
861-
# TODO: Change when implemented
862-
@test_throws ErrorException(
863-
"ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
864-
) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
865-
866-
# Clean up
867-
DiffOpt.empty_input_sensitivities!(model)
868-
869-
# TODO: Change when implemented
870-
MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5)
871-
872-
@test_throws ErrorException(
873-
"ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
874-
) DiffOpt.reverse_differentiate!(model)
875-
end
876-
877844
end # module
878845

879846
TestConicProgram.runtests()

0 commit comments

Comments
 (0)