
Commit 737fff4

🤖 Format .jl files (#184)
Co-authored-by: tmigot <tmigot@users.noreply.github.com>
Parent: 3685ced

2 files changed: +199 -205 lines

test/dummy-solver.jl (+197 -201)
The formatter's only change in this file collapses the multi-line signature of `dummy` onto a single line; every other line is unchanged context. The hunk below shows the actual change, and the full formatted file follows for reference.

```diff
@@ -29,11 +29,7 @@
 end
 
-function dummy(
-  nlp::AbstractNLPModel{T, S},
-  args...;
-  kwargs...,
-) where {T, S <: AbstractVector{T}}
+function dummy(nlp::AbstractNLPModel{T, S}, args...; kwargs...) where {T, S <: AbstractVector{T}}
   solver = DummySolver(nlp)
   stats = GenericExecutionStats(nlp)
   solve!(solver, nlp, stats, args...; kwargs...)
 end
```
The solver type and its workspace constructor:

```julia
using NLPModels, SolverCore
using LinearAlgebra # supplies qr, ldiv!, mul!, norm, factorize, Symmetric

# non-allocating reshape
# see https://github.com/JuliaLang/julia/issues/36313
reshape_array(a, dims) = invoke(Base._reshape, Tuple{AbstractArray, typeof(dims)}, a, dims)

mutable struct DummySolver{S} <: AbstractOptimizationSolver
  x::S    # primal approximation
  gx::S   # gradient of objective
  y::S    # multipliers estimates
  rhs::S  # right-hand side of Newton system
  jval::S # flattened Jacobian
  hval::S # flattened Hessian
  wval::S # flattened augmented matrix
  Δxy::S  # search direction
end

function DummySolver(nlp::AbstractNLPModel{T, S}) where {T, S <: AbstractVector{T}}
  nvar, ncon = nlp.meta.nvar, nlp.meta.ncon
  x = similar(nlp.meta.x0)
  gx = similar(nlp.meta.x0)
  y = similar(nlp.meta.y0)
  rhs = similar(nlp.meta.x0, nvar + ncon)
  jval = similar(nlp.meta.x0, ncon * nvar)
  hval = similar(nlp.meta.x0, nvar * nvar)
  wval = similar(nlp.meta.x0, (nvar + ncon) * (nvar + ncon))
  Δxy = similar(nlp.meta.x0, nvar + ncon)
  DummySolver{S}(x, gx, y, rhs, jval, hval, wval, Δxy)
end
```
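`reshape_array` forces the generic `Base._reshape` method through `invoke` so the reshape stays non-allocating for array types where plain `reshape` would allocate (see the linked issue). A quick sketch of the property the solver relies on; the values are illustrative only:

```julia
# The reshaped matrix shares storage with the original vector,
# so writes through the matrix are visible in the vector.
a = collect(1.0:6.0)
M = reshape_array(a, (2, 3)) # 2×3, column-major view over a's memory
M[2, 1] = 42.0
@assert a[2] == 42.0
```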
The `dummy` driver, whose signature is the one spot this commit touches:

```julia
function dummy(nlp::AbstractNLPModel{T, S}, args...; kwargs...) where {T, S <: AbstractVector{T}}
  solver = DummySolver(nlp)
  stats = GenericExecutionStats(nlp)
  solve!(solver, nlp, stats, args...; kwargs...)
end
```
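Once the whole file is loaded (including `solve!` below), the driver can be exercised on any NLPModel with dense Jacobian and Hessian structure. A minimal sketch, assuming `ADNLPModels` is available; the toy problem is ours, not part of the test suite:

```julia
using ADNLPModels

# min (x₁ - 1)² + 4(x₂ - 1)²  subject to  x₁ + x₂ = 2
nlp = ADNLPModel(
  x -> (x[1] - 1)^2 + 4 * (x[2] - 1)^2, # objective
  [0.0, 0.0],                           # starting point
  x -> [x[1] + x[2] - 2],               # constraint body
  [0.0],                                # lower bound on c(x)
  [0.0],                                # upper bound on c(x): equality
)

stats = dummy(nlp, verbose = false)
@show stats.status stats.objective stats.solution
```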
And the `solve!` method, unchanged by the commit:

```julia
function solve!(
  solver::DummySolver{S},
  nlp::AbstractNLPModel{T, S},
  stats::GenericExecutionStats;
  callback = (args...) -> nothing,
  x0::S = nlp.meta.x0,
  atol::Real = sqrt(eps(T)),
  rtol::Real = sqrt(eps(T)),
  max_eval::Int = 1000,
  max_time::Float64 = 30.0,
  verbose::Bool = true,
) where {T, S <: AbstractVector{T}}
  start_time = time()
  elapsed_time = 0.0
  set_time!(stats, elapsed_time)

  nvar, ncon = nlp.meta.nvar, nlp.meta.ncon
  x = solver.x .= x0
  rhs = solver.rhs
  dual = view(rhs, 1:nvar)
  cx = view(rhs, (nvar + 1):(nvar + ncon))
  gx = solver.gx
  y = solver.y
  jval = solver.jval
  hval = solver.hval
  wval = solver.wval
  Δxy = solver.Δxy
  nnzh = Int(nvar * (nvar + 1) / 2)
  nnzh == nlp.meta.nnzh || error("solver assumes Hessian is dense")
  nvar * ncon == nlp.meta.nnzj || error("solver assumes Jacobian is dense")

  grad!(nlp, x, gx)
  dual .= gx

  # assume the model returns a dense Jacobian in column-major order
  if ncon > 0
    cons!(nlp, x, cx)
    jac_coord!(nlp, x, jval)
    Jx = reshape_array(jval, (ncon, nvar))
    Jqr = qr(Jx')

    # compute least-squares multipliers
    # by solving Jx' y = -gx
    gx .*= -1
    ldiv!(y, Jqr, gx)

    # update dual <- dual + Jx' * y
    mul!(dual, Jx', y, one(T), one(T))
  end

  iter = 0
  set_iter!(stats, iter)
  ϵd = atol + rtol * norm(dual)
  ϵp = atol

  fx = obj(nlp, x)
  set_objective!(stats, fx)
  verbose && @info log_header([:iter, :f, :c, :dual, :t, :x], [Int, T, T, T, Float64, Char])
  verbose && @info log_row(Any[iter, fx, norm(cx), norm(dual), elapsed_time, 'c'])
  solved = norm(dual) < ϵd && norm(cx) < ϵp

  set_status!(
    stats,
    get_status(
      nlp,
      elapsed_time = elapsed_time,
      iter = iter,
      optimal = solved,
      max_eval = max_eval,
      max_time = max_time,
    ),
  )

  while stats.status == :unknown
    # assume the model returns a dense Hessian in column-major order
    # NB: hess_coord!() only returns values in the lower triangle
    hess_coord!(nlp, x, y, view(hval, 1:nnzh))

    # rearrange nonzeros so they correspond to a dense nvar x nvar matrix
    j = nvar * nvar
    i = nnzh
    k = 1
    while i > nvar
      for _ = 1:k
        hval[j] = hval[i]
        hval[i] = 0
        j -= 1
        i -= 1
      end
      j -= nvar - k
      k += 1
    end

    # fill in augmented matrix
    # W = [H J']
    #     [J 0 ]
    wval .= 0
    Wxy = reshape_array(wval, (nvar + ncon, nvar + ncon))
    Hxy = reshape_array(hval, (nvar, nvar))
    Wxy[1:nvar, 1:nvar] .= Hxy
    for i = 1:nvar
      Wxy[i, i] += sqrt(eps(T)) # regularize the (1,1) block
    end
    if ncon > 0
      Wxy[(nvar + 1):(nvar + ncon), 1:nvar] .= Jx
    end
    LBL = factorize(Symmetric(Wxy, :L))

    # Newton step: solve W Δxy = -rhs
    ldiv!(Δxy, LBL, rhs)
    Δxy .*= -1
    @views Δx = Δxy[1:nvar]
    @views Δy = Δxy[(nvar + 1):(nvar + ncon)]
    x .+= Δx
    y .+= Δy

    grad!(nlp, x, gx)
    dual .= gx
    if ncon > 0
      cons!(nlp, x, cx)
      jac_coord!(nlp, x, jval)
      Jx = reshape_array(jval, (ncon, nvar))
      Jqr = qr(Jx')
      gx .*= -1
      ldiv!(y, Jqr, gx)
      mul!(dual, Jx', y, one(T), one(T))
    end
    elapsed_time = time() - start_time
    set_time!(stats, elapsed_time)
    presid = norm(cx)
    dresid = norm(dual)
    set_residuals!(stats, presid, dresid)
    solved = dresid < ϵd && presid < ϵp

    iter += 1
    fx = obj(nlp, x)
    set_iter!(stats, iter)
    set_objective!(stats, fx)

    set_status!(
      stats,
      get_status(
        nlp,
        elapsed_time = elapsed_time,
        iter = iter,
        optimal = solved,
        max_eval = max_eval,
        max_time = max_time,
      ),
    )

    callback(nlp, solver, stats)

    verbose && @info log_row(Any[iter, fx, norm(cx), norm(dual), elapsed_time, 'd'])
  end

  set_residuals!(stats, norm(cx), norm(dual))
  z = has_bounds(nlp) ? zeros(T, nvar) : zeros(T, 0)
  set_multipliers!(stats, y, z, z)
  set_solution!(stats, x)
  return stats
end
```
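The backward copy at the top of the main loop spreads the packed lower-triangle Hessian values returned by `hess_coord!` out to their dense column-major positions, in place and without a temporary. A sanity sketch of the same index arithmetic on a 3×3 example; the helper name `expand_lower!` is ours, not from the file:

```julia
# Expand a packed lower triangle, stored column by column, into a dense
# column-major matrix using the same backward copy as solve!.
function expand_lower!(hval, nvar, nnzh)
  j, i, k = nvar * nvar, nnzh, 1
  while i > nvar # the first column (nvar entries) is already in place
    for _ = 1:k
      hval[j] = hval[i]
      hval[i] = 0
      j -= 1
      i -= 1
    end
    j -= nvar - k # skip over the strict upper part of the previous column
    k += 1
  end
  return hval
end

packed = [1.0, 2, 3, 4, 5, 6] # entries (1,1) (2,1) (3,1) (2,2) (3,2) (3,3)
hval = vcat(packed, zeros(3)) # nvar * nvar = 9 slots in total
H = reshape(expand_lower!(hval, 3, 6), 3, 3)
@assert H == [1.0 0 0; 2 4 0; 3 5 6]
```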

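Both before and inside the loop, the multiplier estimate solves the linear least-squares problem of minimizing ‖Jxᵀ y + gx‖ over y via a QR factorization of Jxᵀ. A standalone sketch of the same call pattern, with a made-up 1×3 Jacobian:

```julia
using LinearAlgebra

Jx = [1.0 2.0 3.0]  # ncon = 1, nvar = 3
g = [1.0, 1.0, 1.0] # stands in for the objective gradient
Jqr = qr(Jx')
y = zeros(1)
ldiv!(y, Jqr, -g)   # least-squares solve, as in solve!
r = g + Jx' * y     # dual residual gx + Jx' * y
@assert norm(Jx * r) < 1e-12 # normal equations: Jx * r = 0 at the minimizer
```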