Skip to content

Commit b00ddfb

Browse files
committed
add JuMP API and test it
1 parent c54f321 commit b00ddfb

File tree

5 files changed

+237
-50
lines changed

5 files changed

+237
-50
lines changed

README.md

Lines changed: 18 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -33,15 +33,12 @@ examples, tutorials, and an API reference.
3333

3434
### DiffOpt-JuMP API with `Parameters`
3535

36+
Here is an example with a Parametric **Linear Program**:
37+
3638
```julia
3739
using JuMP, DiffOpt, HiGHS
3840

39-
model = Model(
40-
() -> DiffOpt.diff_optimizer(
41-
HiGHS.Optimizer;
42-
with_parametric_opt_interface = true,
43-
),
44-
)
41+
model = DiffOpt.quadratic_diff_model(HiGHS.Optimizer)
4542
set_silent(model)
4643

4744
p_val = 4.0
@@ -64,9 +61,9 @@ optimize!(model)
6461

6562
# differentiate w.r.t. p
6663
direction_p = 3.0
67-
MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(direction_p))
64+
DiffOpt.set_forward_parameter(model, p, direction_p)
6865
DiffOpt.forward_differentiate!(model)
69-
@show MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) == direction_p * 3 / pc_val
66+
@show DiffOpt.get_forward_variable(model, x) == direction_p * 3 / pc_val
7067

7168
# update p and pc
7269
p_val = 2.0
@@ -82,45 +79,30 @@ optimize!(model)
8279
DiffOpt.empty_input_sensitivities!(model)
8380
# differentiate w.r.t. pc
8481
direction_pc = 10.0
85-
MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(pc), Parameter(direction_pc))
82+
DiffOpt.set_forward_parameter(model, pc, direction_pc)
8683
DiffOpt.forward_differentiate!(model)
87-
@show abs(MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) -
84+
@show abs(DiffOpt.get_forward_variable(model, x) -
8885
-direction_pc * 3 * p_val / pc_val^2) < 1e-5
8986

9087
# always a good practice to clear previously set sensitivities
9188
DiffOpt.empty_input_sensitivities!(model)
9289
# Now, reverse model AD
9390
direction_x = 10.0
94-
MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction_x)
91+
DiffOpt.set_reverse_variable(model, x, direction_x)
9592
DiffOpt.reverse_differentiate!(model)
96-
@show MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)) == MOI.Parameter(direction_x * 3 / pc_val)
97-
@show abs(MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(pc)).value -
98-
-direction_x * 3 * p_val / pc_val^2) < 1e-5
93+
@show DiffOpt.get_reverse_parameter(model, p) == direction_x * 3 / pc_val
94+
@show DiffOpt.get_reverse_parameter(model, pc) == -direction_x * 3 * p_val / pc_val^2
9995
```
10096

101-
### Low level DiffOpt-JuMP API:
102-
103-
A brief example:
97+
Available models:
98+
* `DiffOpt.quadratic_diff_model`: Quadratic Programs (QP) and Linear Programs
99+
(LP)
100+
* `DiffOpt.conic_diff_model`: Conic Programs (CP) and Linear Programs (LP)
101+
* `DiffOpt.nonlinear_diff_model`: Nonlinear Programs (NLP), Quadratic Program
102+
(QP) and Linear Programs (LP)
103+
* `DiffOpt.diff_model`: Nonlinear Programs (NLP), Conic Programs (CP),
104+
Quadratic Programs (QP) and Linear Programs (LP)
104105

105-
```julia
106-
using JuMP, DiffOpt, HiGHS
107-
# Create a model using the wrapper
108-
model = Model(() -> DiffOpt.diff_optimizer(HiGHS.Optimizer))
109-
# Define your model and solve it
110-
@variable(model, x)
111-
@constraint(model, cons, x >= 3)
112-
@objective(model, Min, 2x)
113-
optimize!(model)
114-
# Choose the problem parameters to differentiate with respect to, and set their
115-
# perturbations.
116-
MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0)
117-
# Differentiate the model
118-
DiffOpt.reverse_differentiate!(model)
119-
# fetch the gradients
120-
grad_exp = MOI.get(model, DiffOpt.ReverseConstraintFunction(), cons) # -3 x - 1
121-
constant(grad_exp) # -1
122-
coefficient(grad_exp, x) # -3
123-
```
124106

125107
## Citing DiffOpt.jl
126108

src/DiffOpt.jl

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,8 @@ function add_default_factorization(model)
4848
return
4949
end
5050

51+
include("jump_wrapper.jl")
52+
5153
export diff_optimizer
5254

5355
# TODO

src/jump_wrapper.jl

Lines changed: 93 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,15 @@
11
"""
2+
diff_model(optimizer_constructor; with_parametric_opt_interface::Bool = true, with_bridge_type = Float64, with_cache::Bool = true)
3+
4+
Create a JuMP model with a differentiable optimizer. The optimizer is created
5+
using `optimizer_constructor`. This model will try to select the proper
6+
differentiable optimization method based on the problem structure.
7+
8+
See also: [`nonlinear_diff_model`](@ref), [`conic_diff_model`](@ref), [`quadratic_diff_model`](@ref).
29
"""
310
function diff_model(
411
optimizer_constructor;
5-
with_parametric_opt_interface::Bool = false,
12+
with_parametric_opt_interface::Bool = true,
613
with_bridge_type = Float64,
714
with_cache::Bool = true,
815
)
@@ -15,11 +22,79 @@ function diff_model(
1522
return JuMP.direct_model(inner)
1623
end
1724

18-
# nonlinear_diff_model
19-
# conic_diff_model
20-
# quadratic_diff_model
25+
"""
26+
nonlinear_diff_model(optimizer_constructor; with_bridge_type = Float64, with_cache::Bool = true)
27+
28+
Create a JuMP model with a differentiable optimizer for nonlinear programs.
29+
The optimizer is created using `optimizer_constructor`.
30+
31+
See also: [`conic_diff_model`](@ref), [`quadratic_diff_model`](@ref), [`diff_model`](@ref).
32+
"""
33+
function nonlinear_diff_model(
34+
optimizer_constructor;
35+
with_bridge_type = Float64,
36+
with_cache::Bool = true,
37+
)
38+
inner = diff_optimizer(
39+
optimizer_constructor;
40+
with_parametric_opt_interface = false,
41+
with_bridge_type = with_bridge_type,
42+
with_cache = with_cache,
43+
)
44+
MOI.set(inner, ModelConstructor(), NonLinearProgram.Model)
45+
return JuMP.direct_model(inner)
46+
end
47+
48+
"""
49+
conic_diff_model(optimizer_constructor; with_bridge_type = Float64, with_cache::Bool = true)
50+
51+
Create a JuMP model with a differentiable optimizer for conic programs.
52+
The optimizer is created using `optimizer_constructor`.
53+
54+
See also: [`nonlinear_diff_model`](@ref), [`quadratic_diff_model`](@ref), [`diff_model`](@ref).
55+
"""
56+
function conic_diff_model(
57+
optimizer_constructor;
58+
with_bridge_type = Float64,
59+
with_cache::Bool = true,
60+
)
61+
inner = diff_optimizer(
62+
optimizer_constructor;
63+
with_parametric_opt_interface = true,
64+
with_bridge_type = with_bridge_type,
65+
with_cache = with_cache,
66+
)
67+
MOI.set(inner, ModelConstructor(), ConicProgram.Model)
68+
return JuMP.direct_model(inner)
69+
end
70+
71+
"""
72+
quadratic_diff_model(optimizer_constructor; with_bridge_type = Float64, with_cache::Bool = true)
73+
74+
Create a JuMP model with a differentiable optimizer for quadratic programs.
75+
The optimizer is created using `optimizer_constructor`.
76+
77+
See also: [`nonlinear_diff_model`](@ref), [`conic_diff_model`](@ref), [`diff_model`](@ref).
78+
"""
79+
function quadratic_diff_model(
80+
optimizer_constructor;
81+
with_bridge_type = Float64,
82+
with_cache::Bool = true,
83+
)
84+
inner = diff_optimizer(
85+
optimizer_constructor;
86+
with_parametric_opt_interface = true,
87+
with_bridge_type = with_bridge_type,
88+
with_cache = with_cache,
89+
)
90+
MOI.set(inner, ModelConstructor(), QuadraticProgram.Model)
91+
return JuMP.direct_model(inner)
92+
end
2193

2294
"""
95+
set_forward_parameter(model::JuMP.Model, variable::JuMP.VariableRef, value::Number)
96+
97+
Set the value of a parameter input sensitivity for forward mode.
2398
"""
2499
function set_forward_parameter(
25100
model::JuMP.Model,
@@ -28,34 +103,39 @@ function set_forward_parameter(
28103
)
29104
return MOI.set(
30105
model,
31-
DiffOpt.ForwardConstraintSet(),
106+
ForwardConstraintSet(),
32107
ParameterRef(variable),
33-
value,
108+
Parameter(value),
34109
)
35110
end
36111

37112
"""
113+
get_reverse_parameter(model::JuMP.Model, variable::JuMP.VariableRef)
114+
115+
Get the value of a parameter output sensitivity for reverse mode.
38116
"""
39117
function get_reverse_parameter(model::JuMP.Model, variable::JuMP.VariableRef)
40-
return MOI.get(
41-
model,
42-
DiffOpt.ReverseConstraintSet(),
43-
ParameterRef(variable),
44-
)
118+
return MOI.get(model, ReverseConstraintSet(), ParameterRef(variable)).value
45119
end
46120

47121
"""
122+
set_reverse_variable(model::JuMP.Model, variable::JuMP.VariableRef, value::Number)
123+
124+
Set the value of a variable input sensitivity for reverse mode.
48125
"""
49126
function set_reverse_variable(
50127
model::JuMP.Model,
51128
variable::JuMP.VariableRef,
52129
value::Number,
53130
)
54-
return MOI.set(model, DiffOpt.ReverseVariablePrimal(), variable, value)
131+
return MOI.set(model, ReverseVariablePrimal(), variable, value)
55132
end
56133

57134
"""
135+
get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
136+
137+
Get the value of a variable output sensitivity for forward mode.
58138
"""
59139
function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef)
60-
return MOI.get(model, DiffOpt.ForwardVariablePrimal(), variable)
140+
return MOI.get(model, ForwardVariablePrimal(), variable)
61141
end

src/moi_wrapper.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -863,7 +863,7 @@ function MOI.get(model::Optimizer, attr::DifferentiateTimeSec)
863863
end
864864

865865
function MOI.supports(
866-
model::Optimizer,
866+
::Optimizer,
867867
::NonLinearKKTJacobianFactorization,
868868
::Function,
869869
)

test/jump_wrapper.jl

Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
# Copyright (c) 2020: Akshay Sharma and contributors
2+
#
3+
# Use of this source code is governed by an MIT-style license that can be found
4+
# in the LICENSE.md file or at https://opensource.org/licenses/MIT.
5+
6+
module TestJuMPWrapper
7+
8+
using Test
9+
using JuMP
10+
import DiffOpt
11+
import HiGHS
12+
import Ipopt
13+
import SCS
14+
import MathOptInterface as MOI
15+
16+
const ATOL = 1e-3
17+
const RTOL = 1e-3
18+
19+
function runtests()
20+
for name in names(@__MODULE__; all = true)
21+
if startswith("$name", "test_")
22+
@testset "$(name)" begin
23+
getfield(@__MODULE__, name)()
24+
end
25+
end
26+
end
27+
return
28+
end
29+
30+
function test_jump_api()
31+
for (MODEL, SOLVER) in [
32+
(DiffOpt.quadratic_diff_model, HiGHS.Optimizer),
33+
(DiffOpt.quadratic_diff_model, SCS.Optimizer),
34+
(DiffOpt.quadratic_diff_model, Ipopt.Optimizer),
35+
# (DiffOpt.conic_diff_model, HiGHS.Optimizer),
36+
# (DiffOpt.conic_diff_model, SCS.Optimizer), # conicmodel has a issue with sign
37+
# (DiffOpt.conic_diff_model, Ipopt.Optimizer),
38+
# (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), # SQF ctr not supported?
39+
# (DiffOpt.nonlinear_diff_model, SCS.Optimizer), # returns zero for sensitivity
40+
(DiffOpt.nonlinear_diff_model, Ipopt.Optimizer),
41+
],
42+
ineq in [true, false],
43+
min in [true, false],
44+
flip in [true, false]
45+
46+
@testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(min ? "Min" : "Max"), $(flip ? "geq" : "leq")" begin
47+
model = MODEL(SOLVER)
48+
set_silent(model)
49+
50+
p_val = 4.0
51+
pc_val = 2.0
52+
@variable(model, x)
53+
@variable(model, p in Parameter(p_val))
54+
@variable(model, pc in Parameter(pc_val))
55+
if ineq
56+
if !flip
57+
cons = @constraint(model, pc * x >= 3 * p)
58+
else
59+
cons = @constraint(model, pc * x <= 3 * p)
60+
end
61+
else
62+
@constraint(model, cons, pc * x == 3 * p)
63+
end
64+
sign = flip ? -1 : 1
65+
if min
66+
@objective(model, Min, 2x * sign)
67+
else
68+
@objective(model, Max, -2x * sign)
69+
end
70+
optimize!(model)
71+
@test value(x) 3 * p_val / pc_val atol = ATOL rtol = RTOL
72+
73+
# the function is
74+
# x(p, pc) = 3p / pc
75+
# hence,
76+
# dx/dp = 3 / pc
77+
# dx/dpc = -3p / pc^2
78+
79+
# First, try forward mode AD
80+
81+
# differentiate w.r.t. p
82+
direction_p = 3.0
83+
DiffOpt.set_forward_parameter(model, p, direction_p)
84+
DiffOpt.forward_differentiate!(model)
85+
@test DiffOpt.get_forward_variable(model, x)
86+
direction_p * 3 / pc_val atol = ATOL rtol = RTOL
87+
88+
# update p and pc
89+
p_val = 2.0
90+
pc_val = 6.0
91+
set_parameter_value(p, p_val)
92+
set_parameter_value(pc, pc_val)
93+
# re-optimize
94+
optimize!(model)
95+
# check solution
96+
@test value(x) 3 * p_val / pc_val atol = ATOL rtol = RTOL
97+
98+
# stop differentiating with respect to p
99+
DiffOpt.empty_input_sensitivities!(model)
100+
# differentiate w.r.t. pc
101+
direction_pc = 10.0
102+
DiffOpt.set_forward_parameter(model, pc, direction_pc)
103+
DiffOpt.forward_differentiate!(model)
104+
@test DiffOpt.get_forward_variable(model, x)
105+
-direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL
106+
107+
# always a good practice to clear previously set sensitivities
108+
DiffOpt.empty_input_sensitivities!(model)
109+
# Now, reverse model AD
110+
direction_x = 10.0
111+
DiffOpt.set_reverse_variable(model, x, direction_x)
112+
DiffOpt.reverse_differentiate!(model)
113+
@test DiffOpt.get_reverse_parameter(model, p)
114+
direction_x * 3 / pc_val atol = ATOL rtol = RTOL
115+
@test DiffOpt.get_reverse_parameter(model, pc)
116+
-direction_x * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL
117+
end
118+
end
119+
end
120+
121+
end # module
122+
123+
TestJuMPWrapper.runtests()

0 commit comments

Comments
 (0)