diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl
index abf21231..8141388f 100644
--- a/src/ConicProgram/ConicProgram.jl
+++ b/src/ConicProgram/ConicProgram.jl
@@ -450,15 +450,23 @@ function MOI.get(
     return MOI.get(model.model, attr, ci)
 end
 
+"""
+Method not supported for `DiffOpt.ConicProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
-    return error(
-        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
     )
 end
 
+"""
+Method not supported for `DiffOpt.ConicProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
-    return error(
-        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
     )
 end
 
diff --git a/src/QuadraticProgram/QuadraticProgram.jl b/src/QuadraticProgram/QuadraticProgram.jl
index 1ed58887..6b74b0f5 100644
--- a/src/QuadraticProgram/QuadraticProgram.jl
+++ b/src/QuadraticProgram/QuadraticProgram.jl
@@ -501,15 +501,23 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
     return model.linear_solver = linear_solver
 end
 
+"""
+Method not supported for `DiffOpt.QuadraticProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
-    return error(
-        "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
     )
 end
 
+"""
+Method not supported for `DiffOpt.QuadraticProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
-    return error(
-        "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
    )
 end
 
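[Reviewer note, not part of the patch] Both backends now signal "not implemented here" with `MOI.UnsupportedAttribute` instead of a plain `error`, so a caller can detect the condition by exception type and fall back, which is exactly what `src/moi_wrapper.jl` does below. A minimal sketch of that dispatch pattern; `m` and `fallback_value` are illustrative placeholders, not names from this patch:

    sensitivity = try
        MOI.get(m, DiffOpt.ForwardObjectiveSensitivity())
    catch e
        e isa MOI.UnsupportedAttribute || rethrow(e)
        fallback_value(m)  # hypothetical stand-in for DiffOpt's chain-rule fallback
    end
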
+""" +function get_forward_objective(model::JuMP.Model) + return MOI.get(model, ForwardObjectiveSensitivity()) +end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 56cd2096..4cc5ddc6 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -574,11 +574,80 @@ function reverse_differentiate!(model::Optimizer) MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value) end if !iszero(model.input_cache.dobj) - MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) + try + MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) + catch e + if e isa MOI.UnsupportedAttribute + _fallback_set_reverse_objective_sensitivity( + model, + model.input_cache.dobj, + ) + else + rethrow(e) + end + end end return reverse_differentiate!(diff) end +# Gradient evaluation functions for objective sensitivity fallbacks +function _eval_gradient(::Optimizer, ::Number) + return Dict{MOI.VariableIndex,Float64}() +end + +function _eval_gradient(::Optimizer, f::MOI.VariableIndex) + return Dict{MOI.VariableIndex,Float64}(f => 1.0) +end + +function _eval_gradient(::Optimizer, f::MOI.ScalarAffineFunction{Float64}) + grad = Dict{MOI.VariableIndex,Float64}() + for term in f.terms + grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient + end + return grad +end + +function _eval_gradient( + model::Optimizer, + f::MOI.ScalarQuadraticFunction{Float64}, +) + grad = Dict{MOI.VariableIndex,Float64}() + for term in f.affine_terms + grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient + end + # MOI convention: function is 0.5 * x' * Q * x, so derivative of diagonal + # term 0.5 * coef * xi^2 is coef * xi (not 2 * coef * xi) + for term in f.quadratic_terms + xi, xj = term.variable_1, term.variable_2 + coef = term.coefficient + xi_val = MOI.get(model, MOI.VariablePrimal(), xi) + xj_val = MOI.get(model, MOI.VariablePrimal(), xj) + if xi == xj + grad[xi] = get(grad, xi, 0.0) + coef * xi_val + else + grad[xi] = get(grad, xi, 0.0) + coef * xj_val + grad[xj] = get(grad, xj, 0.0) + coef * xi_val + end + end + return grad +end + +function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val) + diff = _diff(model) + obj_type = MOI.get(model, MOI.ObjectiveFunctionType()) + obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}()) + grad = _eval_gradient(model, obj_func) + for (xi, df_dxi) in grad + MOI.set( + diff, + ReverseVariablePrimal(), + model.index_map[xi], + df_dxi * val, + ) + end + return +end + function _copy_forward_in_constraint(diff, index_map, con_map, constraints) for (index, value) in constraints MOI.set( @@ -830,7 +899,30 @@ function MOI.get( end function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity) - return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr) + diff_model = _checked_diff(model, attr, :forward_differentiate!) 
diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl
index 56cd2096..4cc5ddc6 100644
--- a/src/moi_wrapper.jl
+++ b/src/moi_wrapper.jl
@@ -574,11 +574,80 @@ function reverse_differentiate!(model::Optimizer)
         MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value)
     end
     if !iszero(model.input_cache.dobj)
-        MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
+        try
+            MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
+        catch e
+            if e isa MOI.UnsupportedAttribute
+                _fallback_set_reverse_objective_sensitivity(
+                    model,
+                    model.input_cache.dobj,
+                )
+            else
+                rethrow(e)
+            end
+        end
     end
     return reverse_differentiate!(diff)
 end
 
+# Gradient evaluation functions for objective sensitivity fallbacks
+function _eval_gradient(::Optimizer, ::Number)
+    return Dict{MOI.VariableIndex,Float64}()
+end
+
+function _eval_gradient(::Optimizer, f::MOI.VariableIndex)
+    return Dict{MOI.VariableIndex,Float64}(f => 1.0)
+end
+
+function _eval_gradient(::Optimizer, f::MOI.ScalarAffineFunction{Float64})
+    grad = Dict{MOI.VariableIndex,Float64}()
+    for term in f.terms
+        grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient
+    end
+    return grad
+end
+
+function _eval_gradient(
+    model::Optimizer,
+    f::MOI.ScalarQuadraticFunction{Float64},
+)
+    grad = Dict{MOI.VariableIndex,Float64}()
+    for term in f.affine_terms
+        grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient
+    end
+    # MOI convention: function is 0.5 * x' * Q * x, so derivative of diagonal
+    # term 0.5 * coef * xi^2 is coef * xi (not 2 * coef * xi)
+    for term in f.quadratic_terms
+        xi, xj = term.variable_1, term.variable_2
+        coef = term.coefficient
+        xi_val = MOI.get(model, MOI.VariablePrimal(), xi)
+        xj_val = MOI.get(model, MOI.VariablePrimal(), xj)
+        if xi == xj
+            grad[xi] = get(grad, xi, 0.0) + coef * xi_val
+        else
+            grad[xi] = get(grad, xi, 0.0) + coef * xj_val
+            grad[xj] = get(grad, xj, 0.0) + coef * xi_val
+        end
+    end
+    return grad
+end
+
+function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)
+    diff = _diff(model)
+    obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
+    obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
+    grad = _eval_gradient(model, obj_func)
+    for (xi, df_dxi) in grad
+        MOI.set(
+            diff,
+            ReverseVariablePrimal(),
+            model.index_map[xi],
+            df_dxi * val,
+        )
+    end
+    return
+end
+
 function _copy_forward_in_constraint(diff, index_map, con_map, constraints)
     for (index, value) in constraints
         MOI.set(
@@ -830,7 +899,30 @@ function MOI.get(
 end
 
 function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
-    return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr)
+    diff_model = _checked_diff(model, attr, :forward_differentiate!)
+    val = 0.0
+    try
+        val = MOI.get(diff_model, attr)
+    catch e
+        if e isa MOI.UnsupportedAttribute
+            val = _fallback_get_forward_objective_sensitivity(model)
+        else
+            rethrow(e)
+        end
+    end
+    return val
+end
+
+function _fallback_get_forward_objective_sensitivity(model::Optimizer)
+    obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
+    obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
+    grad = _eval_gradient(model, obj_func)
+    ret = 0.0
+    for (xi, df_dxi) in grad
+        dx_dp = MOI.get(model, ForwardVariablePrimal(), xi)
+        ret += df_dxi * dx_dp
+    end
+    return ret
 end
 
 function MOI.supports(
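[Reviewer note, not part of the patch] Both fallbacks are a chain rule through the objective at the optimal point: forward mode returns d(objective) = sum_i (df/dx_i) * dx_i using `ForwardVariablePrimal`, and reverse mode seeds `ReverseVariablePrimal` of each variable with (df/dx_i) * vbar, where vbar is the requested objective seed. A plain-arithmetic check of the quadratic convention used by `_eval_gradient`; the numbers are illustrative, loosely following the new quadratic test below (objective 2x^2 + 7x, constraint x == 3p):

    coef = 4.0                  # MOI stores 2x^2 as ScalarQuadraticTerm(4.0, x, x), i.e. 0.5 * 4 * x^2
    x_val = 3.0                 # illustrative primal value
    df_dx = coef * x_val + 7.0  # gradient of 2x^2 + 7x at x = 3  ->  19.0
    dx_dp = 3.0                 # from x == 3p
    df_dx * dx_dp               # forward fallback result: 57.0
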
diff --git a/test/conic_program.jl b/test/conic_program.jl
index 9c26289b..93f15261 100644
--- a/test/conic_program.jl
+++ b/test/conic_program.jl
@@ -841,39 +841,6 @@ function test_jump_psd_cone_with_parameter_pv_v_pv()
     @test dx ≈ 0.0 atol = 1e-4 rtol = 1e-4
 end
 
-function test_ObjectiveSensitivity()
-    model = DiffOpt.conic_diff_model(SCS.Optimizer)
-    @variable(model, x)
-    @variable(model, p in MOI.Parameter(1.0))
-    @constraint(
-        model,
-        con,
-        [p * x, (2 * x - 3), p * 3 * x] in
-        MOI.PositiveSemidefiniteConeTriangle(2)
-    )
-    @objective(model, Min, x)
-    optimize!(model)
-    direction_p = 2.0
-    DiffOpt.set_forward_parameter(model, p, direction_p)
-
-    DiffOpt.forward_differentiate!(model)
-
-    # TODO: Change when implemented
-    @test_throws ErrorException(
-        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
-    ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
-
-    # Clean up
-    DiffOpt.empty_input_sensitivities!(model)
-
-    # TODO: Change when implemented
-    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5)
-
-    @test_throws ErrorException(
-        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
-    ) DiffOpt.reverse_differentiate!(model)
-end
-
 end # module
 
 TestConicProgram.runtests()
diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl
index 6588ab0c..86cb0f5d 100644
--- a/test/jump_wrapper.jl
+++ b/test/jump_wrapper.jl
@@ -11,9 +11,7 @@ import DiffOpt
 import HiGHS
 import Ipopt
 import SCS
-import ParametricOptInterface as POI
 import MathOptInterface as MOI
-import ParametricOptInterface as POI
 
 const ATOL = 1e-3
 const RTOL = 1e-3
@@ -29,6 +27,210 @@ function runtests()
     return
 end
 
+function test_obj_simple()
+    for (MODEL, SOLVER) in [
+        (DiffOpt.diff_model, HiGHS.Optimizer),
+        (DiffOpt.diff_model, SCS.Optimizer),
+        (DiffOpt.diff_model, Ipopt.Optimizer),
+    ],
+        sign in [+1, -1],
+        sign_p in [-1, +1],
+        sense in [:Min, :Max],
+        with_bridge_type in [Float64, nothing]
+
+        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
+            continue
+        end
+
+        @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: $sense, sign_p: $sign_p" begin
+            model = MODEL(SOLVER; with_bridge_type)
+            set_silent(model)
+
+            p_val = 4.0
+            @variable(model, x)
+            @variable(model, p in Parameter(p_val))
+            @constraint(model, con, x == 3 * sign_p * p)
+            @objective(model, Min, 2 * sign * x)
+            if sense == :Max
+                @objective(model, Max, 2 * sign * x)
+            end
+            optimize!(model)
+            @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL
+
+            DiffOpt.empty_input_sensitivities!(model)
+            direction_obj = 2.0
+            DiffOpt.set_reverse_objective(model, direction_obj)
+            DiffOpt.reverse_differentiate!(model)
+            @test DiffOpt.get_reverse_parameter(model, p) ≈
+                  sign_p * sign * 6 * direction_obj atol = ATOL rtol = RTOL
+
+            DiffOpt.empty_input_sensitivities!(model)
+            direction_p = 3.0
+            DiffOpt.set_forward_parameter(model, p, direction_p)
+            DiffOpt.forward_differentiate!(model)
+            @test DiffOpt.get_forward_objective(model) ≈
+                  sign_p * sign * 6 * direction_p atol = ATOL rtol = RTOL
+        end
+    end
+
+    return
+end
+
+function test_obj_simple_quad()
+    # Note: conic_diff_model excluded - doesn't properly support quadratic objectives
+    for (MODEL, SOLVER) in [
+        (DiffOpt.diff_model, HiGHS.Optimizer),
+        (DiffOpt.diff_model, SCS.Optimizer),
+        (DiffOpt.diff_model, Ipopt.Optimizer),
+        (DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
+        (DiffOpt.quadratic_diff_model, SCS.Optimizer),
+        (DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
+        (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer),
+        (DiffOpt.nonlinear_diff_model, SCS.Optimizer),
+        (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
+    ],
+        sign in [+1, -1],
+        sign_p in [-1, +1],
+        sense in [:Min, :Max],
+        with_bridge_type in [Float64, nothing]
+
+        if isnothing(with_bridge_type) && SOLVER === SCS.Optimizer
+            continue
+        end
+        # Skip invalid quadratic cases: convex (sign=1) needs Min, concave (sign=-1) needs Max
+        if SOLVER != Ipopt.Optimizer &&
+           ((sign == 1 && sense == :Max) || (sign == -1 && sense == :Min))
+            continue
+        end
+
+        @testset "$(MODEL) with: $(SOLVER), bridge:$with_bridge_type, sign:$sign, sense: $sense, sign_p: $sign_p" begin
+            model = MODEL(SOLVER; with_bridge_type)
+            set_silent(model)
+
+            p_val = 4.0
+            @variable(model, x)
+            @variable(model, p in Parameter(p_val))
+            @constraint(model, con, x == 3 * sign_p * p)
+            @objective(model, Min, sign * (2 * x^2 + 7x))
+            if sense == :Max
+                @objective(model, Max, sign * (2 * x^2 + 7x))
+            end
+            optimize!(model)
+            @test value(x) ≈ sign_p * 3 * p_val atol = ATOL rtol = RTOL
+
+            DiffOpt.empty_input_sensitivities!(model)
+            direction_obj = 2.0
+            DiffOpt.set_reverse_objective(model, direction_obj)
+            DiffOpt.reverse_differentiate!(model)
+            @test DiffOpt.get_reverse_parameter(model, p) ≈
+                  sign_p * sign * 3 * (4 * value(x) + 7) * direction_obj atol =
+                ATOL rtol = RTOL
+
+            DiffOpt.empty_input_sensitivities!(model)
+            direction_p = 3.0
+            DiffOpt.set_forward_parameter(model, p, direction_p)
+            DiffOpt.forward_differentiate!(model)
+            @test DiffOpt.get_forward_objective(model) ≈
+                  sign_p * sign * 3 * (4 * value(x) + 7) * direction_p atol =
+                ATOL rtol = RTOL
+        end
+    end
+
+    return
+end
+
"geq" : "leq") bridge:$with_bridge_type" begin + model = MODEL(SOLVER; with_bridge_type) + set_silent(model) + + p_val = 4.0 + pc_val = 2.0 + @variable(model, x) + @variable(model, p in Parameter(p_val)) + @variable(model, pc in Parameter(pc_val)) + if ineq + if !flip + cons = @constraint(model, con, pc * x >= 3 * p) + else + cons = @constraint(model, con, pc * x <= 3 * p) + end + else + cons = @constraint(model, con, pc * x == 3 * p) + end + + for obj_coef in [2, 5] + sign = flip ? -1 : 1 + dir = _min ? 1 : -1 + if _min + @objective(model, Min, dir * obj_coef * x * sign) + else + @objective(model, Max, dir * obj_coef * x * sign) + end + + optimize!(model) + @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_obj = 2.0 + DiffOpt.set_reverse_objective(model, direction_obj) + DiffOpt.reverse_differentiate!(model) + @test DiffOpt.get_reverse_parameter(model, p) ≈ + dir * sign * obj_coef * direction_obj * 3 / pc_val atol = + ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, pc) ≈ + -dir * sign * obj_coef * direction_obj * 3 * p_val / + (pc_val^2) atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + direction_p = 3.0 + DiffOpt.set_forward_parameter(model, p, direction_p) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_objective(model) ≈ + dir * sign * obj_coef * direction_p * 3 / pc_val atol = + ATOL rtol = RTOL + + # stop differentiating with respect to p + DiffOpt.empty_input_sensitivities!(model) + # differentiate w.r.t. pc + direction_pc = 10.0 + DiffOpt.set_forward_parameter(model, pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_objective(model) ≈ + -dir * sign * obj_coef * direction_pc * 3 * p_val / + pc_val^2 atol = ATOL rtol = RTOL + end + end + end + + return +end + +# TODO test quadratic obj + function test_jump_api() for (MODEL, SOLVER) in [ (DiffOpt.diff_model, HiGHS.Optimizer), @@ -126,6 +328,8 @@ function test_jump_api() -direction_x * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL end end + + return end end # module diff --git a/test/moi_wrapper.jl b/test/moi_wrapper.jl index 40f3d653..2b92ee4b 100644 --- a/test/moi_wrapper.jl +++ b/test/moi_wrapper.jl @@ -39,14 +39,17 @@ function test_moi_test_runtests() model, config; exclude = Any[ - # removed because of the `ZerosBridge` issue: - # https://github.com/jump-dev/MathOptInterface.jl/issues/2861 - # - zeros bridge does not support duals because it cumbersome - # - many bridges do not support get ConstraintFunction because it is cumbersome - # so there is no way out of this error for now. - # at the same time this is a modeling corner case tha could be avoided - # by the user. - "test_conic_linear_VectorOfVariables_2"], + # removed because of the `ZerosBridge` issue: + # https://github.com/jump-dev/MathOptInterface.jl/issues/2861 + # - zeros bridge does not support duals because it cumbersome + # - many bridges do not support get ConstraintFunction because it is cumbersome + # so there is no way out of this error for now. + # at the same time this is a modeling corner case tha could be avoided + # by the user. 
+ "test_conic_linear_VectorOfVariables_2", + "test_nonlinear_expression_hs110", + "test_nonlinear_expression_quartic", + ], ) return end @@ -135,6 +138,147 @@ function test_dU_from_dQ() return end +function test_eval_gradient_number() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + grad = DiffOpt._eval_gradient(model, 5.0) + @test isempty(grad) + grad = DiffOpt._eval_gradient(model, 0.0) + @test isempty(grad) +end + +function test_eval_gradient_variable_index() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + x = MOI.add_variable(model) + grad = DiffOpt._eval_gradient(model, x) + @test length(grad) == 1 + @test grad[x] == 1.0 +end + +function test_eval_gradient_scalar_affine_function() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + y = MOI.add_variable(model) + # f = 3x + 5y + 7 + f = MOI.ScalarAffineFunction( + [MOI.ScalarAffineTerm(3.0, x), MOI.ScalarAffineTerm(5.0, y)], + 7.0, + ) + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 2 + @test grad[x] == 3.0 + @test grad[y] == 5.0 +end + +function test_eval_gradient_scalar_affine_function_repeated_variable() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + x = MOI.add_variable(model) + # f = 3x + 2x = 5x (repeated variable in terms) + f = MOI.ScalarAffineFunction( + [MOI.ScalarAffineTerm(3.0, x), MOI.ScalarAffineTerm(2.0, x)], + 0.0, + ) + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 1 + @test grad[x] == 5.0 +end + +function test_eval_gradient_quadratic_diagonal() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + MOI.add_constraint(model, x, MOI.GreaterThan(0.0)) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + # f = 2x^2 (MOI stores as 0.5 * Q, so coefficient is 4 for 2x^2) + # df/dx = 4x + f = MOI.ScalarQuadraticFunction( + [MOI.ScalarQuadraticTerm(4.0, x, x)], # 0.5 * 4 * x^2 = 2x^2 + MOI.ScalarAffineTerm{Float64}[], + 0.0, + ) + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + MOI.optimize!(model) + # At x=0, gradient should be 0 + grad = DiffOpt._eval_gradient(model, f) + @test length(grad) == 1 + @test grad[x] ≈ 0.0 atol = ATOL + + # Now test with x = 3 + model2 = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model2, MOI.Silent(), true) + x2 = MOI.add_variable(model2) + MOI.add_constraint(model2, x2, MOI.EqualTo(3.0)) + MOI.set(model2, MOI.ObjectiveSense(), MOI.MIN_SENSE) + f2 = MOI.ScalarQuadraticFunction( + [MOI.ScalarQuadraticTerm(4.0, x2, x2)], + MOI.ScalarAffineTerm{Float64}[], + 0.0, + ) + MOI.set(model2, MOI.ObjectiveFunction{typeof(f2)}(), f2) + MOI.optimize!(model2) + grad2 = DiffOpt._eval_gradient(model2, f2) + # df/dx = 4 * 3 = 12 + @test grad2[x2] ≈ 12.0 atol = ATOL +end + +function test_eval_gradient_quadratic_off_diagonal() + model = DiffOpt.diff_optimizer(HiGHS.Optimizer) + MOI.set(model, MOI.Silent(), true) + x = MOI.add_variable(model) + y = MOI.add_variable(model) + MOI.add_constraint(model, x, MOI.EqualTo(2.0)) + MOI.add_constraint(model, y, MOI.EqualTo(5.0)) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + # Use convex objective: 3x^2 + 3y^2 + xy (Hessian [[6,1],[1,6]] is PD) + # df/dx = 6x + y = 12 + 5 = 17 + # df/dy = x + 6y = 2 + 30 = 32 + v1, v2 = x.value <= y.value ? 
diff --git a/test/quadratic_program.jl b/test/quadratic_program.jl
index 205b3b1d..31cfa914 100644
--- a/test/quadratic_program.jl
+++ b/test/quadratic_program.jl
@@ -349,34 +349,6 @@ function test_differentiating_non_trivial_convex_qp_moi()
     return
 end
 
-function test_ObjectiveSensitivity()
-    model = DiffOpt.quadratic_diff_model(HiGHS.Optimizer)
-    @variable(model, x)
-    @variable(model, p in MOI.Parameter(1.0))
-    @constraint(model, x >= p)
-    @objective(model, Min, x)
-    optimize!(model)
-    direction_p = 2.0
-    DiffOpt.set_forward_parameter(model, p, direction_p)
-
-    DiffOpt.forward_differentiate!(model)
-
-    # TODO: Change when implemented
-    @test_throws ErrorException(
-        "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
-    ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
-
-    # Clean up
-    DiffOpt.empty_input_sensitivities!(model)
-
-    # TODO: Change when implemented
-    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5)
-
-    @test_throws ErrorException(
-        "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
-    ) DiffOpt.reverse_differentiate!(model)
-end
-
 end # module
 
 TestQuadraticProgram.runtests()