From 2f7fcc79aeb8462afc0cbedcd6b9440389e689f0 Mon Sep 17 00:00:00 2001
From: joaquimg
Date: Wed, 12 Nov 2025 08:01:56 -0300
Subject: [PATCH 1/9] WIP: objective sensitivity fallbacks for conic and quadratic backends

---
 src/ConicProgram/ConicProgram.jl         |   8 +-
 src/QuadraticProgram/QuadraticProgram.jl |   8 +-
 src/jump_wrapper.jl                      |  22 +++++
 src/moi_wrapper.jl                       | 105 ++++++++++++++++++++++-
 test/jump_wrapper.jl                     |  97 +++++++++++++++++++++
 5 files changed, 230 insertions(+), 10 deletions(-)

diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl
index abf21231a..8c317600d 100644
--- a/src/ConicProgram/ConicProgram.jl
+++ b/src/ConicProgram/ConicProgram.jl
@@ -451,14 +451,14 @@ function MOI.get(
 end
 
 function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
-    return error(
-        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
     )
 end
 
 function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
-    return error(
-        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
     )
 end
 
diff --git a/src/QuadraticProgram/QuadraticProgram.jl b/src/QuadraticProgram/QuadraticProgram.jl
index 1ed588878..cf2da7886 100644
--- a/src/QuadraticProgram/QuadraticProgram.jl
+++ b/src/QuadraticProgram/QuadraticProgram.jl
@@ -502,14 +502,14 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
 end
 
 function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
-    return error(
-        "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
     )
 end
 
 function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
-    return error(
-        "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
     )
 end
 
diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl
index ce26c4a5c..b88f6fd04 100644
--- a/src/jump_wrapper.jl
+++ b/src/jump_wrapper.jl
@@ -143,3 +143,25 @@ Get the value of a variable output sensitivity for forward mode.
 function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
     return MOI.get(model, ForwardVariablePrimal(), variable)
 end
+
+
+"""
+    set_reverse_objective(model::JuMP.Model, value::Number)
+
+Set the value of the objective input sensitivity for reverse mode.
+"""
+function set_reverse_objective(
+    model::JuMP.Model,
+    value::Number,
+)
+    return MOI.set(model, ReverseObjectiveSensitivity(), value)
+end
+
+"""
+    get_forward_objective(model::JuMP.Model)
+
+Get the value of the objective output sensitivity for forward mode.
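+
+For example (a sketch, assuming `model` has already been solved and a
+forward tangent was set with `set_forward_parameter`):
+
+    DiffOpt.forward_differentiate!(model)
+    dobj = DiffOpt.get_forward_objective(model)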
+""" +function get_forward_objective(model::JuMP.Model) + return MOI.get(model, ForwardObjectiveSensitivity()) +end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 56cd2096c..d876388a5 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -574,11 +574,61 @@ function reverse_differentiate!(model::Optimizer) MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value) end if !iszero(model.input_cache.dobj) - MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) + if !isempty(model.input_cache.dx) + error( + "Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.", + ) + end + try + MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) + catch e + if e isa MOI.UnsupportedAttribute + _fallback_set_reverse_objective_sensitivity(model, model.input_cache.dobj) + else + rethrow(e) + end + end end return reverse_differentiate!(diff) end +function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) + diff = _diff(model) + obj_type = MOI.get( + model, + MOI.ObjectiveFunctionType(), + ) + obj_func = MOI.get( + model, + MOI.ObjectiveFunction{obj_type}(), + ) + for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) + df_dx = MOI.Nonlinear.SymbolicAD.simplify!( + MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), + ) + if iszero(df_dx) + continue + end + dd = 0.0 + if df_dx isa Number + dd = df_dx * val + elseif df_dx isa MOI.ScalarAffineFunction{Float64} + for term in df_dx.terms + xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable) + dd += term.coefficient * xj_val * val + end + dd += df_dx * val + else + error( + "Cannot compute forward objective sensitivity fallback: " * + "unsupported derivative found.", + ) + end + MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], dd) + end + return +end + function _copy_forward_in_constraint(diff, index_map, con_map, constraints) for (index, value) in constraints MOI.set( @@ -830,7 +880,58 @@ function MOI.get( end function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity) - return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr) + diff_model = _checked_diff(model, attr, :forward_differentiate!) 
+    val = 0.0
+    try
+        val = MOI.get(diff_model, attr)
+    catch e
+        if e isa MOI.UnsupportedAttribute
+            val = _fallback_get_forward_objective_sensitivity(model)
+        else
+            rethrow(e)
+        end
+    end
+    return val
+end
+
+function _fallback_get_forward_objective_sensitivity(model::Optimizer)
+    ret = 0.0
+    obj_type = MOI.get(
+        model,
+        MOI.ObjectiveFunctionType(),
+    )
+    obj_func = MOI.get(
+        model,
+        MOI.ObjectiveFunction{obj_type}(),
+    )
+    for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func)
+        df_dx = MOI.Nonlinear.SymbolicAD.simplify!(
+            MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi),
+        )
+        if iszero(df_dx)
+            continue
+        end
+        dx_dp = MOI.get(
+            model,
+            ForwardVariablePrimal(),
+            xi,
+        )
+        if df_dx isa Number
+            ret += df_dx * dx_dp
+        elseif df_dx isa MOI.ScalarAffineFunction{Float64}
+            for term in df_dx.terms
+                xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable)
+                ret += term.coefficient * xj_val * dx_dp
+            end
+            ret += df_dx.constant * dx_dp
+        else
+            error(
+                "Cannot compute forward objective sensitivity fallback: " *
+                "unsupported derivative found.",
+            )
+        end
+    end
+    return ret
 end
 
 function MOI.supports(
diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl
index 6588ab0c9..489843f25 100644
--- a/test/jump_wrapper.jl
+++ b/test/jump_wrapper.jl
@@ -29,6 +29,103 @@ function runtests()
     return
 end
 
+function test_obj()
+
+    for (MODEL, SOLVER) in [
+        (DiffOpt.diff_model, HiGHS.Optimizer),
+        # (DiffOpt.diff_model, SCS.Optimizer),
+        # (DiffOpt.diff_model, Ipopt.Optimizer),
+        # (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
+        # (DiffOpt.quadratic_diff_model, SCS.Optimizer),
+        # (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
+        # (DiffOpt.conic_diff_model, HiGHS.Optimizer),
+        # (DiffOpt.conic_diff_model, SCS.Optimizer),
+        # (DiffOpt.conic_diff_model, Ipopt.Optimizer),
+        # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer),
+        # (DiffOpt.nonlinear_diff_model, SCS.Optimizer),
+        # (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
+        ],
+        # ineq in [true, false],
+        # _min in [true, false],
+        # flip in [true, false],
+        with_bridge_type in [Float64, nothing]
+
+        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
+            continue
+        end
+
+
+        MODEL = DiffOpt.diff_model
+        SOLVER = HiGHS.Optimizer
+        with_bridge_type = Float64
+        ineq = false
+        _min = true
+        flip = false
+
+        @testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(_min ? "Min" : "Max"), $(flip ? "geq" : "leq") bridge:$with_bridge_type" begin
+            model = MODEL(SOLVER; with_bridge_type)
+            set_silent(model)
+
+            p_val = 4.0
+            pc_val = 2.0
+            @variable(model, x)
+            @variable(model, p in Parameter(p_val))
+            @variable(model, pc in Parameter(pc_val))
+            # if ineq
+            #     if !flip
+            #         cons = @constraint(model, con, pc * x >= 3 * p)
+            #     else
+            #         cons = @constraint(model, con, pc * x <= 3 * p)
+            #     end
+            # else
+            cons = @constraint(model, con, pc * x == 3 * p)
+            # end
+            # sign = flip ? -1 : 1
+            # if _min
+            #     @objective(model, Min, 2x * sign)
+            # else
+            #     @objective(model, Max, -2x * sign)
+            # end
+
+            for obj_coef in [-3, 2, 5]
+                @objective(model, Min, obj_coef * x)
+
+                optimize!(model)
+                @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL
+
+                DiffOpt.empty_input_sensitivities!(model)
+                direction_obj = 2.0
+                DiffOpt.set_reverse_objective(model, direction_obj)
+                DiffOpt.reverse_differentiate!(model)
+                @test DiffOpt.get_reverse_parameter(model, p) ≈ obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL
+                @test DiffOpt.get_reverse_parameter(model, pc) ≈ -obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL
+
+                DiffOpt.empty_input_sensitivities!(model)
+                direction_p = 3.0
+                DiffOpt.set_forward_parameter(model, p, direction_p)
+                DiffOpt.forward_differentiate!(model)
+                @test DiffOpt.get_forward_objective(model) ≈ obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL
+
+                # stop differentiating with respect to p
+                DiffOpt.empty_input_sensitivities!(model)
+                # differentiate w.r.t. pc
+                direction_pc = 10.0
+                DiffOpt.set_forward_parameter(model, pc, direction_pc)
+                DiffOpt.forward_differentiate!(model)
+                @test DiffOpt.get_forward_objective(model) ≈
+                    - obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL
+
+            end
+
+
+        end
+    end
+
+    return
+end
+
+# TODO test quadratic obj
+
 function test_jump_api()
     for (MODEL, SOLVER) in [
         (DiffOpt.diff_model, HiGHS.Optimizer),

From 351fdd4a55c499b8e9d22c77032ee17099995f27 Mon Sep 17 00:00:00 2001
From: joaquimg
Date: Mon, 1 Dec 2025 02:31:34 -0300
Subject: [PATCH 2/9] adjust tests

---
 test/jump_wrapper.jl | 133 ++++++++++++++++++++++---------------------
 1 file changed, 86 insertions(+), 47 deletions(-)

diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl
index 489843f25..adbb8f1fd 100644
--- a/test/jump_wrapper.jl
+++ b/test/jump_wrapper.jl
@@ -29,38 +29,79 @@ function runtests()
     return
 end
 
-function test_obj()
-
-    for (MODEL, SOLVER) in [
+function test_obj_simple()
+
+    for (MODEL, SOLVER) in [
         (DiffOpt.diff_model, HiGHS.Optimizer),
-        # (DiffOpt.diff_model, SCS.Optimizer),
-        # (DiffOpt.diff_model, Ipopt.Optimizer),
-        # (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
-        # (DiffOpt.quadratic_diff_model, SCS.Optimizer),
-        # (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
-        # (DiffOpt.conic_diff_model, HiGHS.Optimizer),
-        # (DiffOpt.conic_diff_model, SCS.Optimizer),
-        # (DiffOpt.conic_diff_model, Ipopt.Optimizer),
-        # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer),
-        # (DiffOpt.nonlinear_diff_model, SCS.Optimizer),
-        # (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
+        (DiffOpt.diff_model, SCS.Optimizer),
+        (DiffOpt.diff_model, Ipopt.Optimizer),
         ],
-        # ineq in [true, false],
-        # _min in [true, false],
-        # flip in [true, false],
+        sign in [+1, -1],
+        sign_p in [-1, +1],
+        sense in [:Min, :Max],
         with_bridge_type in [Float64, nothing]
 
        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
            continue
        end
 
+        @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: $sense, sign_p: $sign_p" begin
+            model = MODEL(SOLVER; with_bridge_type)
+            set_silent(model)
 
-        MODEL = DiffOpt.diff_model
-        SOLVER = HiGHS.Optimizer
-        with_bridge_type = Float64
-        ineq = false
-        _min = true
-        flip = false
+            p_val = 4.0
+            @variable(model, x)
+            @variable(model, p in Parameter(p_val))
+            @constraint(model, con, x == 3 * sign_p * p)
+            @objective(model, Min, 2 * sign * x)
+            if sense == :Max
+                @objective(model, Max, 2 * sign * x)
+            end
+            optimize!(model)
+            @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL
+
+            DiffOpt.empty_input_sensitivities!(model)
+            direction_obj = 2.0
+            DiffOpt.set_reverse_objective(model, direction_obj)
+            DiffOpt.reverse_differentiate!(model)
+            @test DiffOpt.get_reverse_parameter(model, p) ≈ sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL
+
+            DiffOpt.empty_input_sensitivities!(model)
+            direction_p = 3.0
+            DiffOpt.set_forward_parameter(model, p, direction_p)
+            DiffOpt.forward_differentiate!(model)
+            @test DiffOpt.get_forward_objective(model) ≈ sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL
+
+        end
+    end
+
+    return
+end
+
+function test_obj()
+
+    for (MODEL, SOLVER) in [
+        (DiffOpt.diff_model, HiGHS.Optimizer),
+        (DiffOpt.diff_model, SCS.Optimizer),
+        (DiffOpt.diff_model, Ipopt.Optimizer),
+        (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
+        (DiffOpt.quadratic_diff_model, SCS.Optimizer),
+        (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
+        (DiffOpt.conic_diff_model, HiGHS.Optimizer),
+        (DiffOpt.conic_diff_model, SCS.Optimizer),
+        (DiffOpt.conic_diff_model, Ipopt.Optimizer),
+        (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer),
+        (DiffOpt.nonlinear_diff_model, SCS.Optimizer),
+        (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
+        ],
+        ineq in [true, false],
+        _min in [true, false],
+        flip in [true, false],
+        with_bridge_type in [Float64, nothing]
+
+        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
+            continue
+        end
 
         @testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(_min ? "Min" : "Max"), $(flip ? "geq" : "leq") bridge:$with_bridge_type" begin
             model = MODEL(SOLVER; with_bridge_type)
@@ -71,24 +112,25 @@ function test_obj()
             @variable(model, x)
             @variable(model, p in Parameter(p_val))
             @variable(model, pc in Parameter(pc_val))
-            # if ineq
-            #     if !flip
-            #         cons = @constraint(model, con, pc * x >= 3 * p)
-            #     else
-            #         cons = @constraint(model, con, pc * x <= 3 * p)
-            #     end
-            # else
+            if ineq
+                if !flip
+                    cons = @constraint(model, con, pc * x >= 3 * p)
+                else
+                    cons = @constraint(model, con, pc * x <= 3 * p)
+                end
+            else
             cons = @constraint(model, con, pc * x == 3 * p)
-            # end
-            # sign = flip ? -1 : 1
-            # if _min
-            #     @objective(model, Min, 2x * sign)
-            # else
-            #     @objective(model, Max, -2x * sign)
-            # end
+            end
 
-            for obj_coef in [-3, 2, 5]
-                @objective(model, Min, obj_coef * x)
+            for obj_coef in [2, 5]
+
+                sign = flip ? -1 : 1
+                dir = _min ? 1 : -1
+                if _min
+                    @objective(model, Min, dir * obj_coef * x * sign)
+                else
+                    @objective(model, Max, dir * obj_coef * x * sign)
+                end
 
                 optimize!(model)
                 @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL
 
                 DiffOpt.empty_input_sensitivities!(model)
                 direction_obj = 2.0
                 DiffOpt.set_reverse_objective(model, direction_obj)
                 DiffOpt.reverse_differentiate!(model)
-                @test DiffOpt.get_reverse_parameter(model, p) ≈ obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL
-                @test DiffOpt.get_reverse_parameter(model, pc) ≈ -obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL
+                @test DiffOpt.get_reverse_parameter(model, p) ≈ dir * sign * obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL
+                @test DiffOpt.get_reverse_parameter(model, pc) ≈ - dir * sign * obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL
 
                 DiffOpt.empty_input_sensitivities!(model)
                 direction_p = 3.0
                 DiffOpt.set_forward_parameter(model, p, direction_p)
                 DiffOpt.forward_differentiate!(model)
-                @test DiffOpt.get_forward_objective(model) ≈ obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL
+                @test DiffOpt.get_forward_objective(model) ≈ dir * sign * obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL
 
                 # stop differentiating with respect to p
                 DiffOpt.empty_input_sensitivities!(model)
                 # differentiate w.r.t. pc
                 direction_pc = 10.0
                 DiffOpt.set_forward_parameter(model, pc, direction_pc)
                 DiffOpt.forward_differentiate!(model)
                 @test DiffOpt.get_forward_objective(model) ≈
-                    - obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL
-
+                    - dir * sign * obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL
             end
-
-
         end
     end

From b9d2e2bb5865e23084895001c80764b78ff2aef6 Mon Sep 17 00:00:00 2001
From: joaquimg
Date: Mon, 1 Dec 2025 02:36:20 -0300
Subject: [PATCH 3/9] format

---
 src/jump_wrapper.jl  |  6 +-----
 src/moi_wrapper.jl   | 31 +++++++++----------------------
 test/jump_wrapper.jl | 25 +++++++++++++++----------
 3 files changed, 25 insertions(+), 37 deletions(-)

diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl
index b88f6fd04..f78efd748 100644
--- a/src/jump_wrapper.jl
+++ b/src/jump_wrapper.jl
@@ -144,16 +144,12 @@ function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
     return MOI.get(model, ForwardVariablePrimal(), variable)
 end
 
-
 """
     set_reverse_objective(model::JuMP.Model, value::Number)
 
 Set the value of the objective input sensitivity for reverse mode.
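 
 For example (a sketch, assuming `model` has been solved and `p` is one of
 its parameters):
 
     DiffOpt.set_reverse_objective(model, 1.0)
     DiffOpt.reverse_differentiate!(model)
     dp = DiffOpt.get_reverse_parameter(model, p)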
""" -function set_reverse_objective( - model::JuMP.Model, - value::Number, -) +function set_reverse_objective(model::JuMP.Model, value::Number) return MOI.set(model, ReverseObjectiveSensitivity(), value) end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index d876388a5..59a7f4db0 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -583,7 +583,10 @@ function reverse_differentiate!(model::Optimizer) MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) catch e if e isa MOI.UnsupportedAttribute - _fallback_set_reverse_objective_sensitivity(model, model.input_cache.dobj) + _fallback_set_reverse_objective_sensitivity( + model, + model.input_cache.dobj, + ) else rethrow(e) end @@ -594,14 +597,8 @@ end function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) diff = _diff(model) - obj_type = MOI.get( - model, - MOI.ObjectiveFunctionType(), - ) - obj_func = MOI.get( - model, - MOI.ObjectiveFunction{obj_type}(), - ) + obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) + obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) df_dx = MOI.Nonlinear.SymbolicAD.simplify!( MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), @@ -896,14 +893,8 @@ end function _fallback_get_forward_objective_sensitivity(model::Optimizer) ret = 0.0 - obj_type = MOI.get( - model, - MOI.ObjectiveFunctionType(), - ) - obj_func = MOI.get( - model, - MOI.ObjectiveFunction{obj_type}(), - ) + obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) + obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) df_dx = MOI.Nonlinear.SymbolicAD.simplify!( MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), @@ -911,11 +902,7 @@ function _fallback_get_forward_objective_sensitivity(model::Optimizer) if iszero(df_dx) continue end - dx_dp = MOI.get( - model, - ForwardVariablePrimal(), - xi, - ) + dx_dp = MOI.get(model, ForwardVariablePrimal(), xi) if df_dx isa Number ret += df_dx * dx_dp elseif df_dx isa MOI.ScalarAffineFunction{Float64} diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index adbb8f1fd..8e48e06dd 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -30,7 +30,6 @@ function runtests() end function test_obj_simple() - for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), (DiffOpt.diff_model, SCS.Optimizer), @@ -64,14 +63,15 @@ function test_obj_simple() direction_obj = 2.0 DiffOpt.set_reverse_objective(model, direction_obj) DiffOpt.reverse_differentiate!(model) - @test DiffOpt.get_reverse_parameter(model, p) ≈ sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, p) ≈ + sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) direction_p = 3.0 DiffOpt.set_forward_parameter(model, p, direction_p) DiffOpt.forward_differentiate!(model) - @test DiffOpt.get_forward_objective(model) ≈ sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL - + @test DiffOpt.get_forward_objective(model) ≈ + sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL end end @@ -79,7 +79,6 @@ function test_obj_simple() end function test_obj() - for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), (DiffOpt.diff_model, SCS.Optimizer), @@ -123,7 +122,6 @@ function test_obj() end for obj_coef in [2, 5] - sign = flip ? -1 : 1 dir = _min ? 
1 : -1 if _min @@ -139,14 +137,20 @@ function test_obj() direction_obj = 2.0 DiffOpt.set_reverse_objective(model, direction_obj) DiffOpt.reverse_differentiate!(model) - @test DiffOpt.get_reverse_parameter(model, p) ≈ dir * sign * obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL - @test DiffOpt.get_reverse_parameter(model, pc) ≈ - dir * sign * obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, p) ≈ + dir * sign * obj_coef * direction_obj * 3 / pc_val atol = + ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, pc) ≈ + - dir * sign * obj_coef * direction_obj * 3 * p_val / + (pc_val^2) atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) direction_p = 3.0 DiffOpt.set_forward_parameter(model, p, direction_p) DiffOpt.forward_differentiate!(model) - @test DiffOpt.get_forward_objective(model) ≈ dir * sign * obj_coef * direction_p * 3 / pc_val atol = ATOL rtol = RTOL + @test DiffOpt.get_forward_objective(model) ≈ + dir * sign * obj_coef * direction_p * 3 / pc_val atol = + ATOL rtol = RTOL # stop differentiating with respect to p DiffOpt.empty_input_sensitivities!(model) @@ -155,7 +159,8 @@ function test_obj() DiffOpt.set_forward_parameter(model, pc, direction_pc) DiffOpt.forward_differentiate!(model) @test DiffOpt.get_forward_objective(model) ≈ - - dir * sign * obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL + - dir * sign * obj_coef * direction_pc * 3 * p_val / + pc_val^2 atol = ATOL rtol = RTOL end end end From 0e1ea7abf25c03fb8a720d2cd0551ad5468146d5 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sun, 15 Feb 2026 18:40:58 -0300 Subject: [PATCH 4/9] add quad tests --- src/moi_wrapper.jl | 2 +- test/jump_wrapper.jl | 56 ++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 53 insertions(+), 5 deletions(-) diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 59a7f4db0..675465d3c 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -614,7 +614,7 @@ function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable) dd += term.coefficient * xj_val * val end - dd += df_dx * val + dd += df_dx.constant * val else error( "Cannot compute forward objective sensitivity fallback: " * diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index 8e48e06dd..f203453e8 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -11,9 +11,7 @@ import DiffOpt import HiGHS import Ipopt import SCS -import ParametricOptInterface as POI import MathOptInterface as MOI -import ParametricOptInterface as POI const ATOL = 1e-3 const RTOL = 1e-3 @@ -78,6 +76,56 @@ function test_obj_simple() return end +function test_obj_simple_quad() + for (MODEL, SOLVER) in [ + # (DiffOpt.diff_model, HiGHS.Optimizer), + # (DiffOpt.diff_model, SCS.Optimizer), + # (DiffOpt.diff_model, Ipopt.Optimizer), + ], + sign in [+1, -1], + sign_p in [-1, +1], + sense in [:Min, :Max], + with_bridge_type in [Float64, nothing] + + if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer + continue + end + + @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: $sense, sign_p: $sign_p" begin + model = MODEL(SOLVER; with_bridge_type) + set_silent(model) + + p_val = 4.0 + @variable(model, x) + @variable(model, p in Parameter(p_val)) + @constraint(model, con, x == 3 * sign_p * p) + @objective(model, Min, sign * (2 * x^2 + 7x)) + if sense == :Max + @objective(model, Max, sign * (2 * x^2 + 7x)) + end + 
optimize!(model) + @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL + + # DiffOpt.empty_input_sensitivities!(model) + # direction_obj = 2.0 + # DiffOpt.set_reverse_objective(model, direction_obj) + # DiffOpt.reverse_differentiate!(model) + # @test DiffOpt.get_reverse_parameter(model, p) ≈ + # sign_p * sign * 3 * (2 * value(x) + 7) * direction_obj atol = + # ATOL rtol = RTOL + + # DiffOpt.empty_input_sensitivities!(model) + # direction_p = 3.0 + # DiffOpt.set_forward_parameter(model, p, direction_p) + # DiffOpt.forward_differentiate!(model) + # @test DiffOpt.get_forward_objective(model) ≈ + # sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL + end + end + + return +end + function test_obj() for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), @@ -141,7 +189,7 @@ function test_obj() dir * sign * obj_coef * direction_obj * 3 / pc_val atol = ATOL rtol = RTOL @test DiffOpt.get_reverse_parameter(model, pc) ≈ - - dir * sign * obj_coef * direction_obj * 3 * p_val / + -dir * sign * obj_coef * direction_obj * 3 * p_val / (pc_val^2) atol = ATOL rtol = RTOL DiffOpt.empty_input_sensitivities!(model) @@ -159,7 +207,7 @@ function test_obj() DiffOpt.set_forward_parameter(model, pc, direction_pc) DiffOpt.forward_differentiate!(model) @test DiffOpt.get_forward_objective(model) ≈ - - dir * sign * obj_coef * direction_pc * 3 * p_val / + -dir * sign * obj_coef * direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL end end From 99dd27cc549402c2fc7f77daf7ba1fb99807c4aa Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sun, 15 Feb 2026 19:29:22 -0300 Subject: [PATCH 5/9] fix tests --- test/jump_wrapper.jl | 47 ++++++++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index f203453e8..da159b562 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -77,10 +77,17 @@ function test_obj_simple() end function test_obj_simple_quad() + # Note: conic_diff_model excluded - doesn't properly support quadratic objectives for (MODEL, SOLVER) in [ - # (DiffOpt.diff_model, HiGHS.Optimizer), - # (DiffOpt.diff_model, SCS.Optimizer), - # (DiffOpt.diff_model, Ipopt.Optimizer), + (DiffOpt.diff_model, HiGHS.Optimizer), + (DiffOpt.diff_model, SCS.Optimizer), + (DiffOpt.diff_model, Ipopt.Optimizer), + (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), + (DiffOpt.quadratic_diff_model, SCS.Optimizer), + (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), + (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), + (DiffOpt.nonlinear_diff_model, SCS.Optimizer), + (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), ], sign in [+1, -1], sign_p in [-1, +1], @@ -90,6 +97,11 @@ function test_obj_simple_quad() if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer continue end + # Skip invalid quadratic cases: convex (sign=1) needs Min, concave (sign=-1) needs Max + if SOLVER != Ipopt.Optimizer && + ((sign == 1 && sense == :Max) || (sign == -1 && sense == :Min)) + continue + end @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: $sense, sign_p: $sign_p" begin model = MODEL(SOLVER; with_bridge_type) @@ -106,20 +118,21 @@ function test_obj_simple_quad() optimize!(model) @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL - # DiffOpt.empty_input_sensitivities!(model) - # direction_obj = 2.0 - # DiffOpt.set_reverse_objective(model, direction_obj) - # DiffOpt.reverse_differentiate!(model) - # @test DiffOpt.get_reverse_parameter(model, p) ≈ - # sign_p * sign * 3 * (2 * value(x) + 7) * 
direction_obj atol = - # ATOL rtol = RTOL - - # DiffOpt.empty_input_sensitivities!(model) - # direction_p = 3.0 - # DiffOpt.set_forward_parameter(model, p, direction_p) - # DiffOpt.forward_differentiate!(model) - # @test DiffOpt.get_forward_objective(model) ≈ - # sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL + DiffOpt.empty_input_sensitivities!(model) + direction_obj = 2.0 + DiffOpt.set_reverse_objective(model, direction_obj) + DiffOpt.reverse_differentiate!(model) + @test DiffOpt.get_reverse_parameter(model, p) ≈ + sign_p * sign * 3 * (4 * value(x) + 7) * direction_obj atol = + ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_p = 3.0 + DiffOpt.set_forward_parameter(model, p, direction_p) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_objective(model) ≈ + sign_p * sign * 3 * (4 * value(x) + 7) * direction_p atol = + ATOL rtol = RTOL end end From 84fffde10035956d612d6ce9dadbbc9fbd3bc389 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sun, 15 Feb 2026 19:56:00 -0300 Subject: [PATCH 6/9] add grad tests --- src/moi_wrapper.jl | 91 +++++++++++++++++++++++----------------------- 1 file changed, 46 insertions(+), 45 deletions(-) diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 675465d3c..63f980f15 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -595,33 +595,52 @@ function reverse_differentiate!(model::Optimizer) return reverse_differentiate!(diff) end +# Gradient evaluation functions for objective sensitivity fallbacks +function _eval_gradient(::Optimizer, ::Number) + return Dict{MOI.VariableIndex,Float64}() +end + +function _eval_gradient(::Optimizer, f::MOI.VariableIndex) + return Dict{MOI.VariableIndex,Float64}(f => 1.0) +end + +function _eval_gradient(::Optimizer, f::MOI.ScalarAffineFunction{Float64}) + grad = Dict{MOI.VariableIndex,Float64}() + for term in f.terms + grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient + end + return grad +end + +function _eval_gradient(model::Optimizer, f::MOI.ScalarQuadraticFunction{Float64}) + grad = Dict{MOI.VariableIndex,Float64}() + for term in f.affine_terms + grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient + end + # MOI convention: function is 0.5 * x' * Q * x, so derivative of diagonal + # term 0.5 * coef * xi^2 is coef * xi (not 2 * coef * xi) + for term in f.quadratic_terms + xi, xj = term.variable_1, term.variable_2 + coef = term.coefficient + xi_val = MOI.get(model, MOI.VariablePrimal(), xi) + xj_val = MOI.get(model, MOI.VariablePrimal(), xj) + if xi == xj + grad[xi] = get(grad, xi, 0.0) + coef * xi_val + else + grad[xi] = get(grad, xi, 0.0) + coef * xj_val + grad[xj] = get(grad, xj, 0.0) + coef * xi_val + end + end + return grad +end + function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) diff = _diff(model) obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) - for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) - df_dx = MOI.Nonlinear.SymbolicAD.simplify!( - MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), - ) - if iszero(df_dx) - continue - end - dd = 0.0 - if df_dx isa Number - dd = df_dx * val - elseif df_dx isa MOI.ScalarAffineFunction{Float64} - for term in df_dx.terms - xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable) - dd += term.coefficient * xj_val * val - end - dd += df_dx.constant * val - else - error( - "Cannot compute forward objective sensitivity fallback: " * - "unsupported derivative found.", - ) - end - 
MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], dd) + grad = _eval_gradient(model, obj_func) + for (xi, df_dxi) in grad + MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], df_dxi * val) end return end @@ -892,31 +911,13 @@ function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity) end function _fallback_get_forward_objective_sensitivity(model::Optimizer) - ret = 0.0 obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) - for xi in MOI.Nonlinear.SymbolicAD.variables(obj_func) - df_dx = MOI.Nonlinear.SymbolicAD.simplify!( - MOI.Nonlinear.SymbolicAD.derivative(obj_func, xi), - ) - if iszero(df_dx) - continue - end + grad = _eval_gradient(model, obj_func) + ret = 0.0 + for (xi, df_dxi) in grad dx_dp = MOI.get(model, ForwardVariablePrimal(), xi) - if df_dx isa Number - ret += df_dx * dx_dp - elseif df_dx isa MOI.ScalarAffineFunction{Float64} - for term in df_dx.terms - xj_val = MOI.get(model, MOI.VariablePrimal(), term.variable) - ret += term.coefficient * xj_val * dx_dp - end - ret += df_dx.constant * dx_dp - else - error( - "Cannot compute forward objective sensitivity fallback: " * - "unsupported derivative found.", - ) - end + ret += df_dxi * dx_dp end return ret end From c997393c02b9272f9834e5d065e6d23ebe0b28cd Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sun, 15 Feb 2026 21:02:40 -0300 Subject: [PATCH 7/9] fixes --- src/moi_wrapper.jl | 12 ++- test/conic_program.jl | 33 -------- test/moi_wrapper.jl | 160 ++++++++++++++++++++++++++++++++++++-- test/quadratic_program.jl | 28 ------- 4 files changed, 162 insertions(+), 71 deletions(-) diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 63f980f15..7562c0a0c 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -612,7 +612,10 @@ function _eval_gradient(::Optimizer, f::MOI.ScalarAffineFunction{Float64}) return grad end -function _eval_gradient(model::Optimizer, f::MOI.ScalarQuadraticFunction{Float64}) +function _eval_gradient( + model::Optimizer, + f::MOI.ScalarQuadraticFunction{Float64}, +) grad = Dict{MOI.VariableIndex,Float64}() for term in f.affine_terms grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient @@ -640,7 +643,12 @@ function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) grad = _eval_gradient(model, obj_func) for (xi, df_dxi) in grad - MOI.set(diff, ReverseVariablePrimal(), model.index_map[xi], df_dxi * val) + MOI.set( + diff, + ReverseVariablePrimal(), + model.index_map[xi], + df_dxi * val, + ) end return end diff --git a/test/conic_program.jl b/test/conic_program.jl index 9c26289bc..93f152610 100644 --- a/test/conic_program.jl +++ b/test/conic_program.jl @@ -841,39 +841,6 @@ function test_jump_psd_cone_with_parameter_pv_v_pv() @test dx ≈ 0.0 atol = 1e-4 rtol = 1e-4 end -function test_ObjectiveSensitivity() - model = DiffOpt.conic_diff_model(SCS.Optimizer) - @variable(model, x) - @variable(model, p in MOI.Parameter(1.0)) - @constraint( - model, - con, - [p * x, (2 * x - 3), p * 3 * x] in - MOI.PositiveSemidefiniteConeTriangle(2) - ) - @objective(model, Min, x) - optimize!(model) - direction_p = 2.0 - DiffOpt.set_forward_parameter(model, p, direction_p) - - DiffOpt.forward_differentiate!(model) - - # TODO: Change when implemented - @test_throws ErrorException( - "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend", - ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) - - # Clean up - 
DiffOpt.empty_input_sensitivities!(model) - - # TODO: Change when implemented - MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5) - - @test_throws ErrorException( - "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend", - ) DiffOpt.reverse_differentiate!(model) -end - end # module TestConicProgram.runtests() diff --git a/test/moi_wrapper.jl b/test/moi_wrapper.jl index 40f3d653f..2b92ee4b0 100644 --- a/test/moi_wrapper.jl +++ b/test/moi_wrapper.jl @@ -39,14 +39,17 @@ function test_moi_test_runtests() model, config; exclude = Any[ - # removed because of the `ZerosBridge` issue: - # https://github.com/jump-dev/MathOptInterface.jl/issues/2861 - # - zeros bridge does not support duals because it cumbersome - # - many bridges do not support get ConstraintFunction because it is cumbersome - # so there is no way out of this error for now. - # at the same time this is a modeling corner case tha could be avoided - # by the user. - "test_conic_linear_VectorOfVariables_2"], + # removed because of the `ZerosBridge` issue: + # https://github.com/jump-dev/MathOptInterface.jl/issues/2861 + # - zeros bridge does not support duals because it cumbersome + # - many bridges do not support get ConstraintFunction because it is cumbersome + # so there is no way out of this error for now. + # at the same time this is a modeling corner case tha could be avoided + # by the user. + "test_conic_linear_VectorOfVariables_2", + "test_nonlinear_expression_hs110", + "test_nonlinear_expression_quartic", + ], ) return end @@ -135,6 +138,147 @@ function test_dU_from_dQ() return end +function test_eval_gradient_number() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + grad = DiffOpt._eval_gradient(model, 5.0) + @test isempty(grad) + grad = DiffOpt._eval_gradient(model, 0.0) + @test isempty(grad) +end + +function test_eval_gradient_variable_index() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + x = MOI.add_variable(model) + grad = DiffOpt._eval_gradient(model, x) + @test length(grad) == 1 + @test grad[x] == 1.0 +end + +function test_eval_gradient_scalar_affine_function() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + y = MOI.add_variable(model) + # f = 3x + 5y + 7 + f = MOI.ScalarAffineFunction( + [MOI.ScalarAffineTerm(3.0, x), MOI.ScalarAffineTerm(5.0, y)], + 7.0, + ) + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 2 + @test grad[x] == 3.0 + @test grad[y] == 5.0 +end + +function test_eval_gradient_scalar_affine_function_repeated_variable() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + x = MOI.add_variable(model) + # f = 3x + 2x = 5x (repeated variable in terms) + f = MOI.ScalarAffineFunction( + [MOI.ScalarAffineTerm(3.0, x), MOI.ScalarAffineTerm(2.0, x)], + 0.0, + ) + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 1 + @test grad[x] == 5.0 +end + +function test_eval_gradient_quadratic_diagonal() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + MOI.add_constraint(model, x, MOI.GreaterThan(0.0)) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + # f = 2x^2 (MOI stores as 0.5 * Q, so coefficient is 4 for 2x^2) + # df/dx = 4x + f = MOI.ScalarQuadraticFunction( + [MOI.ScalarQuadraticTerm(4.0, x, x)], # 0.5 * 4 * x^2 = 2x^2 + MOI.ScalarAffineTerm{Float64}[], + 0.0, + ) + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + MOI.optimize!(model) + # At x=0, gradient should be 0 + grad = 
DiffOpt._eval_gradient(model, f) + @test length(grad) == 1 + @test grad[x] ≈ 0.0 atol = ATOL + + # Now test with x = 3 + model2 = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model2, MOI.Silent(), true) + x2 = MOI.add_variable(model2) + MOI.add_constraint(model2, x2, MOI.EqualTo(3.0)) + MOI.set(model2, MOI.ObjectiveSense(), MOI.MIN_SENSE) + f2 = MOI.ScalarQuadraticFunction( + [MOI.ScalarQuadraticTerm(4.0, x2, x2)], + MOI.ScalarAffineTerm{Float64}[], + 0.0, + ) + MOI.set(model2, MOI.ObjectiveFunction{typeof(f2)}(), f2) + MOI.optimize!(model2) + grad2 = DiffOpt._eval_gradient(model2, f2) + # df/dx = 4 * 3 = 12 + @test grad2[x2] ≈ 12.0 atol = ATOL +end + +function test_eval_gradient_quadratic_off_diagonal() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + y = MOI.add_variable(model) + MOI.add_constraint(model, x, MOI.EqualTo(2.0)) + MOI.add_constraint(model, y, MOI.EqualTo(5.0)) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + # Use convex objective: 3x^2 + 3y^2 + xy (Hessian [[6,1],[1,6]] is PD) + # df/dx = 6x + y = 12 + 5 = 17 + # df/dy = x + 6y = 2 + 30 = 32 + v1, v2 = x.value <= y.value ? (x, y) : (y, x) + f = MOI.ScalarQuadraticFunction( + [ + MOI.ScalarQuadraticTerm(6.0, x, x), + MOI.ScalarQuadraticTerm(1.0, v1, v2), + MOI.ScalarQuadraticTerm(6.0, y, y), + ], + MOI.ScalarAffineTerm{Float64}[], + 0.0, + ) + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + MOI.optimize!(model) + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 2 + @test grad[x] ≈ 17.0 atol = ATOL + @test grad[y] ≈ 32.0 atol = ATOL +end + +function test_eval_gradient_quadratic_mixed() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + y = MOI.add_variable(model) + MOI.add_constraint(model, x, MOI.EqualTo(2.0)) + MOI.add_constraint(model, y, MOI.EqualTo(3.0)) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + # f = x^2 + 2xy + 3y^2 + 5x + 7y + # df/dx = 2x + 2y + 5 = 4 + 6 + 5 = 15 + # df/dy = 2x + 6y + 7 = 4 + 18 + 7 = 29 + v1, v2 = x.value <= y.value ? 
(x, y) : (y, x) + f = MOI.ScalarQuadraticFunction( + [ + MOI.ScalarQuadraticTerm(2.0, x, x), + MOI.ScalarQuadraticTerm(2.0, v1, v2), + MOI.ScalarQuadraticTerm(6.0, y, y), + ], + [MOI.ScalarAffineTerm(5.0, x), MOI.ScalarAffineTerm(7.0, y)], + 0.0, + ) + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + MOI.optimize!(model) + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 2 + @test grad[x] ≈ 15.0 atol = ATOL + @test grad[y] ≈ 29.0 atol = ATOL +end + end # module TestMOIWrapper.runtests() diff --git a/test/quadratic_program.jl b/test/quadratic_program.jl index 205b3b1d4..31cfa914a 100644 --- a/test/quadratic_program.jl +++ b/test/quadratic_program.jl @@ -349,34 +349,6 @@ function test_differentiating_non_trivial_convex_qp_moi() return end -function test_ObjectiveSensitivity() - model = DiffOpt.quadratic_diff_model(HiGHS.Optimizer) - @variable(model, x) - @variable(model, p in MOI.Parameter(1.0)) - @constraint(model, x >= p) - @objective(model, Min, x) - optimize!(model) - direction_p = 2.0 - DiffOpt.set_forward_parameter(model, p, direction_p) - - DiffOpt.forward_differentiate!(model) - - # TODO: Change when implemented - @test_throws ErrorException( - "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend", - ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) - - # Clean up - DiffOpt.empty_input_sensitivities!(model) - - # TODO: Change when implemented - MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5) - - @test_throws ErrorException( - "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend", - ) DiffOpt.reverse_differentiate!(model) -end - end # module TestQuadraticProgram.runtests() From 5ea49756e2adbe74501bfb1b64f04ac7c76aae2e Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sun, 15 Feb 2026 22:28:57 -0300 Subject: [PATCH 8/9] fix cov --- src/moi_wrapper.jl | 5 ----- test/jump_wrapper.jl | 2 ++ 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 7562c0a0c..4cc5ddc64 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -574,11 +574,6 @@ function reverse_differentiate!(model::Optimizer) MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value) end if !iszero(model.input_cache.dobj) - if !isempty(model.input_cache.dx) - error( - "Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.", - ) - end try MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) catch e diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index da159b562..86cb0f5df 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -328,6 +328,8 @@ function test_jump_api() -direction_x * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL end end + + return end end # module From 325fe187c5a6b2dd0b9fb689b630f572b7df5533 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Mon, 16 Feb 2026 21:43:14 -0300 Subject: [PATCH 9/9] add comments --- src/ConicProgram/ConicProgram.jl | 8 ++++++++ src/QuadraticProgram/QuadraticProgram.jl | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl index 8c317600d..8141388fe 100644 --- a/src/ConicProgram/ConicProgram.jl +++ b/src/ConicProgram/ConicProgram.jl @@ -450,12 +450,20 @@ function MOI.get( return MOI.get(model.model, attr, ci) end +""" +Method not supported for `DiffOpt.ConicProgram.Model` directly. +However, a fallback is provided in `DiffOpt`. 
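+The fallback combines the gradient of the objective function with the
+variable sensitivities (`DiffOpt.ForwardVariablePrimal`) via the chain rule.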
+""" function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity) return throw( MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()), ) end +""" +Method not supported for `DiffOpt.ConicProgram.Model` directly. +However, a fallback is provided in `DiffOpt`. +""" function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val) return throw( MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()), diff --git a/src/QuadraticProgram/QuadraticProgram.jl b/src/QuadraticProgram/QuadraticProgram.jl index cf2da7886..6b74b0f59 100644 --- a/src/QuadraticProgram/QuadraticProgram.jl +++ b/src/QuadraticProgram/QuadraticProgram.jl @@ -501,12 +501,20 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver) return model.linear_solver = linear_solver end +""" +Method not supported for `DiffOpt.QuadraticProgram.Model` directly. +However, a fallback is provided in `DiffOpt`. +""" function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity) return throw( MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()), ) end +""" +Method not supported for `DiffOpt.QuadraticProgram.Model` directly. +However, a fallback is provided in `DiffOpt`. +""" function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val) return throw( MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),