16 changes: 12 additions & 4 deletions src/ConicProgram/ConicProgram.jl

@@ -450,15 +450,23 @@ function MOI.get(
     return MOI.get(model.model, attr, ci)
 end
 
+"""
+Method not supported for `DiffOpt.ConicProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
-    return error(
-        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
     )
 end
 
+"""
+Method not supported for `DiffOpt.ConicProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
-    return error(
-        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
     )
 end
16 changes: 12 additions & 4 deletions src/QuadraticProgram/QuadraticProgram.jl

@@ -501,15 +501,23 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
     return model.linear_solver = linear_solver
 end
 
+"""
+Method not supported for `DiffOpt.QuadraticProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
-    return error(
-        "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ForwardObjectiveSensitivity()),
     )
 end
 
+"""
+Method not supported for `DiffOpt.QuadraticProgram.Model` directly.
+However, a fallback is provided in `DiffOpt`.
+"""
 function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
-    return error(
-        "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    return throw(
+        MOI.UnsupportedAttribute(DiffOpt.ReverseObjectiveSensitivity()),
    )
 end
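In both backend files above, the change from error(...) to throw(MOI.UnsupportedAttribute(...)) is what enables the new fallback: a typed exception can be identified and handled by the generic wrapper in src/moi_wrapper.jl, whereas a plain ErrorException carrying a message string is indistinguishable from any other failure. A minimal standalone sketch of the pattern (the attribute type and the fallback() helper are illustrative stand-ins, not DiffOpt API):

import MathOptInterface as MOI

# Hypothetical stand-ins for DiffOpt's attribute and fallback path.
struct SomeSensitivity <: MOI.AbstractModelAttribute end
fallback() = 0.0

result = try
    # A backend that cannot compute the attribute signals this with a
    # typed exception rather than error("...").
    throw(MOI.UnsupportedAttribute(SomeSensitivity()))
catch e
    if e isa MOI.UnsupportedAttribute
        fallback()  # recoverable: take the generic chain-rule path
    else
        rethrow(e)  # anything else is a genuine error
    end
end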
18 changes: 18 additions & 0 deletions src/jump_wrapper.jl

@@ -143,3 +143,21 @@ Get the value of a variable output sensitivity for forward mode.
 function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
     return MOI.get(model, ForwardVariablePrimal(), variable)
 end
+
+"""
+    set_reverse_objective(model::JuMP.Model, value::Number)
+
+Set the value of the objective input sensitivity for reverse mode.
+"""
+function set_reverse_objective(model::JuMP.Model, value::Number)
+    return MOI.set(model, ReverseObjectiveSensitivity(), value)
+end
+
+"""
+    get_forward_objective(model::JuMP.Model)
+
+Get the value of the objective output sensitivity for forward mode.
+"""
+function get_forward_objective(model::JuMP.Model)
+    return MOI.get(model, ForwardObjectiveSensitivity())
+end
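Taken together with the fallback below, these helpers give an end-to-end workflow. A sketch adapted from the test removed at the bottom of this PR, assuming SCS is installed, that the generic fallback in src/moi_wrapper.jl covers this model, and that get_reverse_parameter comes from the same existing helper family:

using JuMP
import DiffOpt, SCS
import MathOptInterface as MOI

model = DiffOpt.conic_diff_model(SCS.Optimizer)
@variable(model, x)
@variable(model, p in MOI.Parameter(1.0))
@constraint(
    model,
    [p * x, 2x - 3, 3p * x] in MOI.PositiveSemidefiniteConeTriangle(2)
)
@objective(model, Min, x)
optimize!(model)

# Forward mode: seed a direction for p, then read d(objective)/dp.
DiffOpt.set_forward_parameter(model, p, 2.0)
DiffOpt.forward_differentiate!(model)
dobj = DiffOpt.get_forward_objective(model)

# Reverse mode: seed the objective sensitivity and pull it back to p.
DiffOpt.empty_input_sensitivities!(model)
DiffOpt.set_reverse_objective(model, 0.5)
DiffOpt.reverse_differentiate!(model)
dp = DiffOpt.get_reverse_parameter(model, p)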
96 changes: 94 additions & 2 deletions src/moi_wrapper.jl

@@ -574,11 +574,80 @@ function reverse_differentiate!(model::Optimizer)
         MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value)
     end
     if !iszero(model.input_cache.dobj)
-        MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
+        try
+            MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
+        catch e
+            if e isa MOI.UnsupportedAttribute
+                _fallback_set_reverse_objective_sensitivity(
+                    model,
+                    model.input_cache.dobj,
+                )
+            else
+                rethrow(e)
+            end
+        end
     end
     return reverse_differentiate!(diff)
 end
 
+# Gradient evaluation functions for objective sensitivity fallbacks
+function _eval_gradient(::Optimizer, ::Number)
+    return Dict{MOI.VariableIndex,Float64}()
+end
+
+function _eval_gradient(::Optimizer, f::MOI.VariableIndex)
+    return Dict{MOI.VariableIndex,Float64}(f => 1.0)
+end
+
+function _eval_gradient(::Optimizer, f::MOI.ScalarAffineFunction{Float64})
+    grad = Dict{MOI.VariableIndex,Float64}()
+    for term in f.terms
+        grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient
+    end
+    return grad
+end
+
+function _eval_gradient(
+    model::Optimizer,
+    f::MOI.ScalarQuadraticFunction{Float64},
+)
+    grad = Dict{MOI.VariableIndex,Float64}()
+    for term in f.affine_terms
+        grad[term.variable] = get(grad, term.variable, 0.0) + term.coefficient
+    end
+    # MOI convention: the function is 0.5 * x' * Q * x, so the derivative of a
+    # diagonal term 0.5 * coef * xi^2 is coef * xi (not 2 * coef * xi)
+    for term in f.quadratic_terms
+        xi, xj = term.variable_1, term.variable_2
+        coef = term.coefficient
+        xi_val = MOI.get(model, MOI.VariablePrimal(), xi)
+        xj_val = MOI.get(model, MOI.VariablePrimal(), xj)
+        if xi == xj
+            grad[xi] = get(grad, xi, 0.0) + coef * xi_val
+        else
+            grad[xi] = get(grad, xi, 0.0) + coef * xj_val
+            grad[xj] = get(grad, xj, 0.0) + coef * xi_val
+        end
+    end
+    return grad
+end
+
+function _fallback_set_reverse_objective_sensitivity(model::Optimizer, val)
+    diff = _diff(model)
+    obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
+    obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
+    grad = _eval_gradient(model, obj_func)
+    for (xi, df_dxi) in grad
+        MOI.set(
+            diff,
+            ReverseVariablePrimal(),
+            model.index_map[xi],
+            df_dxi * val,
+        )
+    end
+    return
+end
+
 function _copy_forward_in_constraint(diff, index_map, con_map, constraints)
     for (index, value) in constraints
         MOI.set(

@@ -830,7 +899,30 @@ function MOI.get(
 end
 
 function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
-    return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr)
+    diff_model = _checked_diff(model, attr, :forward_differentiate!)
+    val = 0.0
+    try
+        val = MOI.get(diff_model, attr)
+    catch e
+        if e isa MOI.UnsupportedAttribute
+            val = _fallback_get_forward_objective_sensitivity(model)
+        else
+            rethrow(e)
+        end
+    end
+    return val
+end
+
+function _fallback_get_forward_objective_sensitivity(model::Optimizer)
+    obj_type = MOI.get(model, MOI.ObjectiveFunctionType())
+    obj_func = MOI.get(model, MOI.ObjectiveFunction{obj_type}())
+    grad = _eval_gradient(model, obj_func)
+    ret = 0.0
+    for (xi, df_dxi) in grad
+        dx_dp = MOI.get(model, ForwardVariablePrimal(), xi)
+        ret += df_dxi * dx_dp
+    end
+    return ret
+end
 
 function MOI.supports(
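Both fallbacks are the chain rule applied around the solution map: the forward fallback accumulates df/dp = Σᵢ (∂f/∂xᵢ) · (∂xᵢ/∂p) from ForwardVariablePrimal, while the reverse fallback seeds each ReverseVariablePrimal with (∂f/∂xᵢ) · val. The one subtle ingredient is _eval_gradient respecting MOI's 0.5 * x' * Q * x convention; the following standalone sketch re-derives that logic with variable values supplied in a Dict instead of MOI.get (eval_gradient is a local illustrative name, not package API):

import MathOptInterface as MOI

function eval_gradient(f::MOI.ScalarQuadraticFunction{Float64}, x::Dict)
    grad = Dict{MOI.VariableIndex,Float64}()
    for t in f.affine_terms
        grad[t.variable] = get(grad, t.variable, 0.0) + t.coefficient
    end
    for t in f.quadratic_terms
        xi, xj, c = t.variable_1, t.variable_2, t.coefficient
        if xi == xj
            # 0.5 * c * xi^2 differentiates to c * xi under MOI's convention.
            grad[xi] = get(grad, xi, 0.0) + c * x[xi]
        else
            grad[xi] = get(grad, xi, 0.0) + c * x[xj]
            grad[xj] = get(grad, xj, 0.0) + c * x[xi]
        end
    end
    return grad
end

x1, x2 = MOI.VariableIndex(1), MOI.VariableIndex(2)
# f(x) = x1^2 + x1 * x2: the diagonal coefficient is 2.0 because MOI stores
# 0.5 * Q, while an off-diagonal term is stored once and contributes as-is.
f = MOI.ScalarQuadraticFunction(
    [MOI.ScalarQuadraticTerm(2.0, x1, x1), MOI.ScalarQuadraticTerm(1.0, x1, x2)],
    MOI.ScalarAffineTerm{Float64}[],
    0.0,
)
g = eval_gradient(f, Dict(x1 => 3.0, x2 => 4.0))
# ∇f = (2 * x1 + x2, x1), so g[x1] == 10.0 and g[x2] == 3.0 at (3, 4)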
33 changes: 0 additions & 33 deletions test/conic_program.jl

@@ -841,39 +841,6 @@ function test_jump_psd_cone_with_parameter_pv_v_pv()
     @test dx ≈ 0.0 atol = 1e-4 rtol = 1e-4
 end
 
-function test_ObjectiveSensitivity()
-    model = DiffOpt.conic_diff_model(SCS.Optimizer)
-    @variable(model, x)
-    @variable(model, p in MOI.Parameter(1.0))
-    @constraint(
-        model,
-        con,
-        [p * x, (2 * x - 3), p * 3 * x] in
-        MOI.PositiveSemidefiniteConeTriangle(2)
-    )
-    @objective(model, Min, x)
-    optimize!(model)
-    direction_p = 2.0
-    DiffOpt.set_forward_parameter(model, p, direction_p)
-
-    DiffOpt.forward_differentiate!(model)
-
-    # TODO: Change when implemented
-    @test_throws ErrorException(
-        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
-    ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
-
-    # Clean up
-    DiffOpt.empty_input_sensitivities!(model)
-
-    # TODO: Change when implemented
-    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5)
-
-    @test_throws ErrorException(
-        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
-    ) DiffOpt.reverse_differentiate!(model)
-end
-
 end # module
 
 TestConicProgram.runtests()