diff --git a/src/NonLinearProgram/nlp_utilities.jl b/src/NonLinearProgram/nlp_utilities.jl
index 8f3a79b1..edcbe6e8 100644
--- a/src/NonLinearProgram/nlp_utilities.jl
+++ b/src/NonLinearProgram/nlp_utilities.jl
@@ -489,11 +489,9 @@ function _compute_sensitivity(model::Model; tol = 1e-6)
     # Dual bounds upper
     ∂s[((num_w+num_cons+num_lower+1):end), :] *= -_sense_multiplier
-    grad = _compute_gradient(model)
+    # dual wrt parameter
     primal_idx = [i.value for i in model.cache.primal_vars]
-    params_idx = [i.value for i in model.cache.params]
-    df_dx = grad[primal_idx]
-    df_dp_direct = grad[params_idx]
-    df_dp = df_dx'∂s[1:num_vars, :] + df_dp_direct'
+    df_dx = _compute_gradient(model)[primal_idx]
+    df_dp = df_dx'∂s[1:num_vars, :]
     return ∂s, df_dp
 end
 
diff --git a/test/nlp_program.jl b/test/nlp_program.jl
index 34a41b8d..c266c715 100644
--- a/test/nlp_program.jl
+++ b/test/nlp_program.jl
@@ -740,41 +740,6 @@ function test_ObjectiveSensitivity_model2()
     @test isapprox(dp, -1.5; atol = 1e-4)
 end
 
-function test_ObjectiveSensitivity_direct_param_contrib()
-    model = DiffOpt.nonlinear_diff_model(Ipopt.Optimizer)
-    set_silent(model)
-
-    p_val = 3.0
-    @variable(model, p ∈ MOI.Parameter(p_val))
-    @variable(model, x ≥ 1)
-    @objective(model, Min, p^2 * x^2)
-
-    optimize!(model)
-    @assert is_solved_and_feasible(model)
-
-    Δp = 0.1
-    DiffOpt.set_forward_parameter(model, p, Δp)
-    DiffOpt.forward_differentiate!(model)
-
-    df_dp = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
-    @test isapprox(df_dp, 2 * p_val * Δp, atol = 1e-8) # ≈ 0.6 for p=3
-
-    ε = 1e-6
-    df_dp_fd =
-        (
-            begin
-                set_parameter_value(p, p_val + ε)
-                optimize!(model)
-                Δp * objective_value(model)
-            end - begin
-                set_parameter_value(p, p_val - ε)
-                optimize!(model)
-                Δp * objective_value(model)
-            end
-        ) / (2ε)
-
-    @test isapprox(df_dp, df_dp_fd, atol = 1e-4)
-end
 function test_ObjectiveSensitivity_subset_parameters()
     # Model with 10 parameters, differentiate only w.r.t. 3rd and 7th
     model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))