@@ -574,11 +574,80 @@ function reverse_differentiate!(model::Optimizer)
574574 MOI. set (diff, ReverseConstraintDual (), model. index_map[vi], value)
575575 end
576576 if ! iszero (model. input_cache. dobj)
577- MOI. set (diff, ReverseObjectiveSensitivity (), model. input_cache. dobj)
577+ try
578+ MOI. set (diff, ReverseObjectiveSensitivity (), model. input_cache. dobj)
579+ catch e
580+ if e isa MOI. UnsupportedAttribute
581+ _fallback_set_reverse_objective_sensitivity (
582+ model,
583+ model. input_cache. dobj,
584+ )
585+ else
586+ rethrow (e)
587+ end
588+ end
578589 end
579590 return reverse_differentiate! (diff)
580591end
581592
# Gradient evaluation helpers for the objective-sensitivity fallbacks.
# A constant objective does not depend on any variable, so its gradient is
# the empty dictionary.
_eval_gradient(::Optimizer, ::Number) = Dict{MOI.VariableIndex,Float64}()
597+
# Single-variable objective f(x) = x: the gradient is 1 at that variable.
function _eval_gradient(::Optimizer, v::MOI.VariableIndex)
    return Dict{MOI.VariableIndex,Float64}(v => 1.0)
end
601+
# Affine objective: the gradient is the coefficient of each variable,
# accumulating repeated terms that reference the same variable.
function _eval_gradient(::Optimizer, func::MOI.ScalarAffineFunction{Float64})
    gradient = Dict{MOI.VariableIndex,Float64}()
    for t in func.terms
        gradient[t.variable] = get(gradient, t.variable, 0.0) + t.coefficient
    end
    return gradient
end
609+
# Quadratic objective: affine coefficients plus the quadratic contribution
# evaluated at the current primal solution.
#
# MOI stores the function as 0.5 * x' * Q * x + a' * x + b with each
# off-diagonal pair appearing once, so a diagonal term 0.5 * c * xi^2 has
# derivative c * xi (not 2 * c * xi), while an off-diagonal term c * xi * xj
# contributes c * xj to d/dxi and c * xi to d/dxj.
function _eval_gradient(
    model::Optimizer,
    func::MOI.ScalarQuadraticFunction{Float64},
)
    gradient = Dict{MOI.VariableIndex,Float64}()
    _accumulate(v, dv) = gradient[v] = get(gradient, v, 0.0) + dv
    for t in func.affine_terms
        _accumulate(t.variable, t.coefficient)
    end
    for t in func.quadratic_terms
        v1, v2 = t.variable_1, t.variable_2
        x1 = MOI.get(model, MOI.VariablePrimal(), v1)
        if v1 == v2
            _accumulate(v1, t.coefficient * x1)
        else
            x2 = MOI.get(model, MOI.VariablePrimal(), v2)
            _accumulate(v1, t.coefficient * x2)
            _accumulate(v2, t.coefficient * x1)
        end
    end
    return gradient
end
634+
# Chain-rule fallback used when the inner differentiation model does not
# support `ReverseObjectiveSensitivity`: seed the reverse primal of each
# variable with (df/dxi) * val, where f is the current objective function.
function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)
    inner = _diff(model)
    F = MOI.get(model, MOI.ObjectiveFunctionType())
    objective = MOI.get(model, MOI.ObjectiveFunction{F}())
    for (variable, partial) in _eval_gradient(model, objective)
        MOI.set(
            inner,
            ReverseVariablePrimal(),
            model.index_map[variable],
            partial * val,
        )
    end
    return
end
650+
582651function _copy_forward_in_constraint (diff, index_map, con_map, constraints)
583652 for (index, value) in constraints
584653 MOI. set (
@@ -830,7 +899,30 @@ function MOI.get(
830899end
831900
# Return the forward-mode objective sensitivity from the inner differentiation
# model; if that model does not support `ForwardObjectiveSensitivity`, fall
# back to a chain-rule computation from the forward variable sensitivities.
# Any other exception propagates unchanged.
function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
    diff_model = _checked_diff(model, attr, :forward_differentiate!)
    try
        return MOI.get(diff_model, attr)
    catch e
        if e isa MOI.UnsupportedAttribute
            return _fallback_get_forward_objective_sensitivity(model)
        end
        # Zero-argument rethrow preserves the original backtrace.
        rethrow()
    end
end
915+
# Fallback for inner models without `ForwardObjectiveSensitivity` support:
# df/dp = sum_i (df/dxi) * (dxi/dp), combining the objective gradient with
# the forward variable sensitivities.
function _fallback_get_forward_objective_sensitivity(model::Optimizer)
    F = MOI.get(model, MOI.ObjectiveFunctionType())
    objective = MOI.get(model, MOI.ObjectiveFunction{F}())
    total = 0.0
    for (variable, partial) in _eval_gradient(model, objective)
        total += partial * MOI.get(model, ForwardVariablePrimal(), variable)
    end
    return total
end
835927
836928function MOI. supports (
0 commit comments