diff --git a/Project.toml b/Project.toml
index 750fce8df..fd4180820 100644
--- a/Project.toml
+++ b/Project.toml
@@ -30,7 +30,8 @@ julia = "1"
 ColunaDemos = "a54e61d4-7723-11e9-2469-af255fcaa246"
 GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
 JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
+KnapsackLib = "86859df6-51c5-4863-9ac2-2c1ab8e53eb2"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["ColunaDemos", "GLPK", "JuMP", "Test"]
+test = ["ColunaDemos", "GLPK", "JuMP", "KnapsackLib", "Test"]
diff --git a/src/Algorithm/basic/solveipform.jl b/src/Algorithm/basic/solveipform.jl
index ad3f9efa2..6a29338f0 100644
--- a/src/Algorithm/basic/solveipform.jl
+++ b/src/Algorithm/basic/solveipform.jl
@@ -77,14 +77,14 @@ end
 # SolveIpForm does not have child algorithms, therefore get_child_algorithms() is not defined
 
 # Dispatch on the type of the optimizer to return the parameters
-_optimizer_params(algo::SolveIpForm, ::MoiOptimizer) = algo.moi_params
-_optimizer_params(algo::SolveIpForm, ::UserOptimizer) = algo.user_params
-# TODO : custom optimizer
-_optimizer_params(::SolveIpForm, ::NoOptimizer) = nothing
+_optimizer_params(::Formulation, algo::SolveIpForm, ::MoiOptimizer) = algo.moi_params
+_optimizer_params(::Formulation, algo::SolveIpForm, ::UserOptimizer) = algo.user_params
+_optimizer_params(form::Formulation, algo::SolveIpForm, ::CustomOptimizer) = getinner(getoptimizer(form, algo.optimizer_id))
+_optimizer_params(::Formulation, ::SolveIpForm, ::NoOptimizer) = nothing
 
 function run!(algo::SolveIpForm, env::Env, form::Formulation, input::OptimizationInput)::OptimizationOutput
     opt = getoptimizer(form, algo.optimizer_id)
-    params = _optimizer_params(algo, opt)
+    params = _optimizer_params(form, algo, opt)
     if params !== nothing
         return run!(params, env, form, input; optimizer_id = algo.optimizer_id)
     end
@@ -99,7 +99,7 @@ run!(algo::SolveIpForm, env::Env, reform::Reformulation, input::OptimizationInpu
 ################################################################################
 function get_units_usage(algo::SolveIpForm, form::Formulation)
     opt = getoptimizer(form, algo.optimizer_id)
-    params = _optimizer_params(algo, opt)
+    params = _optimizer_params(form, algo, opt)
     if params !== nothing
         return get_units_usage(params, form)
     end
@@ -135,7 +135,8 @@ function get_units_usage(::UserOptimize, spform::Formulation{DwSp})
     return units_usage
 end
 
-# TODO : get_units_usage of CustomOptimize
+# No get_units_usage method for CustomOptimize because it directly calls
+# get_units_usage of the custom optimizer
 
 ################################################################################
 # run! methods (depends on the type of the optimizer)
@@ -270,4 +271,5 @@ function run!(
     return OptimizationOutput(result)
 end
 
-# TODO : run! of CustomOptimize
\ No newline at end of file
+# No run! method for CustomOptimize because it directly calls the run! method
+# of the custom optimizer
\ No newline at end of file
diff --git a/src/Coluna.jl b/src/Coluna.jl
index 2594af1c6..a8ed0640d 100644
--- a/src/Coluna.jl
+++ b/src/Coluna.jl
@@ -36,21 +36,25 @@ include("parameters.jl")
 include("ColunaBase/ColunaBase.jl")
 using .ColunaBase
 
+include("MathProg/MathProg.jl")
+using .MathProg
+
 mutable struct Env
     env_starting_time::DateTime
     optim_starting_time::Union{Nothing, DateTime}
     params::Params
     kpis::Kpis
     form_counter::Int # 0 is for original form
+    varids::MOI.Utilities.CleverDicts.CleverDict{MOI.VariableIndex, MathProg.VarId}
 end
-Env(params::Params) = Env(now(), nothing, params, Kpis(nothing, nothing), 0)
+Env(params::Params) = Env(
+    now(), nothing, params, Kpis(nothing, nothing), 0,
+    MOI.Utilities.CleverDicts.CleverDict{MOI.VariableIndex, MathProg.VarId}()
+)
 set_optim_start_time!(env::Env) = env.optim_starting_time = now()
 elapsed_optim_time(env::Env) = Dates.toms(now() - env.optim_starting_time) / Dates.toms(Second(1))
 
 Base.isinteger(x::Float64, tol::Float64) = abs(round(x) - x) < tol
 
-include("MathProg/MathProg.jl")
-using .MathProg
-
 include("Algorithm/Algorithm.jl")
 using .Algorithm
diff --git a/src/MOIwrapper.jl b/src/MOIwrapper.jl
index 2ad3e2443..75de5f631 100644
--- a/src/MOIwrapper.jl
+++ b/src/MOIwrapper.jl
@@ -21,7 +21,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
     annotations::Annotations
     #varmap::Dict{MOI.VariableIndex,VarId} # For the user to get VariablePrimal
     vars::CleverDicts.CleverDict{MOI.VariableIndex, Variable}
-    varids::CleverDicts.CleverDict{MOI.VariableIndex, VarId}
+    #varids::CleverDicts.CleverDict{MOI.VariableIndex, VarId}
     moi_varids::Dict{VarId, MOI.VariableIndex}
     names_to_vars::Dict{String, MOI.VariableIndex}
     constrs::Dict{MOI.ConstraintIndex, Constraint}
@@ -33,14 +33,13 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
     feasibility_sense::Bool # Coluna supports only Max or Min.
-
     function Optimizer()
         model = new()
         model.env = Env(Params())
         model.inner = Problem(model.env)
         model.annotations = Annotations()
         model.vars = CleverDicts.CleverDict{MOI.VariableIndex, Variable}()
-        model.varids = CleverDicts.CleverDict{MOI.VariableIndex, VarId}() # TODO : check if necessary to have two dicts for variables
+        #model.varids = CleverDicts.CleverDict{MOI.VariableIndex, VarId}() # TODO : check if necessary to have two dicts for variables
         model.moi_varids = Dict{VarId, MOI.VariableIndex}()
         model.names_to_vars = Dict{String, MOI.VariableIndex}()
         model.constrs = Dict{MOI.ConstraintIndex, Union{Constraint, Nothing}}()
@@ -79,7 +78,7 @@ end
 function _get_orig_varid(optimizer::Optimizer, x::MOI.VariableIndex)
     if haskey(optimizer.vars, x)
-        return optimizer.varids[x]
+        return optimizer.env.varids[x]
     end
     throw(MOI.InvalidIndex(x))
     return origid
 end
@@ -113,7 +112,7 @@ function MOI.add_variable(model::Coluna.Optimizer)
     var = setvar!(orig_form, "v", OriginalVar)
     index = CleverDicts.add_item(model.vars, var)
     model.moi_varids[getid(var)] = index
-    index2 = CleverDicts.add_item(model.varids, getid(var))
+    index2 = CleverDicts.add_item(model.env.varids, getid(var))
     @assert index == index2
     return index
 end
@@ -581,7 +580,7 @@ function MOI.empty!(model::Coluna.Optimizer)
     model.inner = Problem(model.env)
     model.annotations = Annotations()
     model.vars = CleverDicts.CleverDict{MOI.VariableIndex, Variable}()
-    model.varids = CleverDicts.CleverDict{MOI.VariableIndex, VarId}()
+    model.env.varids = CleverDicts.CleverDict{MOI.VariableIndex, VarId}()
     model.moi_varids = Dict{VarId, MOI.VariableIndex}()
     model.constrs = Dict{MOI.ConstraintIndex, Constraint}()
     if model.default_optimizer_builder !== nothing
diff --git a/src/MathProg/MathProg.jl b/src/MathProg/MathProg.jl
index 8dcea7b75..f53ef77da 100644
--- a/src/MathProg/MathProg.jl
+++ b/src/MathProg/MathProg.jl
@@ -43,10 +43,10 @@
 include("MOIinterface.jl")
 
 # TODO : clean up
 # Types
-export MaxSense, MinSense, MoiOptimizer,
+export MaxSense, MinSense,
     Id, ConstrSense, VarSense, FormId, FormulationPhase, Annotations,
-    Counter, UserOptimizer, NoOptimizer, MoiObjective
+    Counter, MoiObjective
 
 # Methods
 export no_optimizer_builder, set_original_formulation!,
@@ -110,4 +110,7 @@ export PrimalBound, DualBound, PrimalSolution, DualSolution, ObjValues,
 # Methods related to projections
 export projection_is_possible, proj_cols_on_rep
 
+# Optimizers of formulations
+export MoiOptimizer, CustomOptimizer, UserOptimizer, NoOptimizer
+
 end
diff --git a/src/MathProg/formulation.jl b/src/MathProg/formulation.jl
index 90ca0eba4..fae0fd4e6 100644
--- a/src/MathProg/formulation.jl
+++ b/src/MathProg/formulation.jl
@@ -25,7 +25,7 @@ Create a new formulation in the Coluna's environment `env` with duty `duty`,
 parent formulation `parent_formulation`, and objective sense `obj_sense`.
""" function create_formulation!( - env::Coluna.Env, + env, duty::Type{<:AbstractFormDuty}; parent_formulation = nothing, obj_sense::Type{<:Coluna.AbstractSense} = MinSense diff --git a/src/MathProg/optimizerwrappers.jl b/src/MathProg/optimizerwrappers.jl index a8944417d..c6b3c3b23 100644 --- a/src/MathProg/optimizerwrappers.jl +++ b/src/MathProg/optimizerwrappers.jl @@ -147,3 +147,12 @@ function write_to_LP_file(form::Formulation, optimizer::MoiOptimizer, filename:: MOI.copy_to(dest, src) MOI.write_to_file(dest, filename) end + +""" + CustomOptimizer <: AbstractOptimizer +""" +struct CustomOptimizer <: AbstractOptimizer + inner::BD.AbstractCustomOptimizer +end + +getinner(optimizer::CustomOptimizer) = optimizer.inner diff --git a/src/MathProg/problem.jl b/src/MathProg/problem.jl index 592129c1e..c45630632 100644 --- a/src/MathProg/problem.jl +++ b/src/MathProg/problem.jl @@ -7,11 +7,11 @@ mutable struct Problem <: AbstractProblem end """ - Problem() + Problem(env) Constructs an empty `Problem`. """ -function Problem(env::Coluna.Env) +function Problem(env) original_formulation = create_formulation!(env, Original) return Problem( nothing, nothing, original_formulation, nothing, diff --git a/src/decomposition.jl b/src/decomposition.jl index aa1542d45..9c7e29dac 100644 --- a/src/decomposition.jl +++ b/src/decomposition.jl @@ -427,6 +427,7 @@ end _optimizerbuilder(opt::Function) = () -> UserOptimizer(opt) _optimizerbuilder(opt::MOI.AbstractOptimizer) = () -> MoiOptimizer(opt) +_optimizerbuilder(opt::BD.AbstractCustomOptimizer) = () -> CustomOptimizer(opt) function getoptimizerbuilders(prob::Problem, ann::BD.Annotation) optimizers = BD.getoptimizerbuilders(ann) diff --git a/test/Project.toml b/test/Project.toml index e1aa822be..592c7fb25 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -8,4 +8,5 @@ MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" LightGraphs = "093fc24a-ae57-5d10-9952-331d41423f4d" -Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a" \ No newline at end of file +Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a" +KnapsackLib = "86859df6-51c5-4863-9ac2-2c1ab8e53eb2" \ No newline at end of file diff --git a/test/interfaces/model.jl b/test/interfaces/model.jl new file mode 100644 index 000000000..21bf0e3d0 --- /dev/null +++ b/test/interfaces/model.jl @@ -0,0 +1,131 @@ +# In this test, we use the Martinelli's knapsack solver pkg ( https://github.com/rafaelmartinelli/KnapsackLib.jl) +# to test the interface of custom models/solvers. 
+
+using KnapsackLib
+mutable struct KnapsackLibModel <: Coluna.MathProg.AbstractFormulation
+    nbitems::Int
+    costs::Vector{Float64}
+    weights::Vector{Float64}
+    capacity::Float64
+    job_to_jumpvar::Dict{Int, JuMP.VariableRef}
+    #varids::Vector{Coluna.MathProg.VarId}
+    #map::Dict{Coluna.MathProg.VarId,Float64}
+end
+KnapsackLibModel(nbitems) = KnapsackLibModel(
+    nbitems, zeros(Float64, nbitems), zeros(Float64, nbitems), 0.0,
+    Dict{Int, JuMP.VariableRef}()
+)
+setcapacity!(model::KnapsackLibModel, cap) = model.capacity = cap
+setweight!(model::KnapsackLibModel, j::Int, w) = model.weights[j] = w
+setcost!(model::KnapsackLibModel, j::Int, c) = model.costs[j] = c
+map!(model::KnapsackLibModel, j::Int, x::JuMP.VariableRef) = model.job_to_jumpvar[j] = x
+
+coluna_backend(model::MOI.Utilities.CachingOptimizer) = coluna_backend(model.optimizer)
+coluna_backend(b::MOI.Bridges.AbstractBridgeOptimizer) = coluna_backend(b.model)
+coluna_backend(model) = model
+
+mutable struct KnapsackLibOptimizer <: BlockDecomposition.AbstractCustomOptimizer
+    model::KnapsackLibModel
+end
+
+function Coluna.Algorithm.get_units_usage(opt::KnapsackLibOptimizer, form) # form is Coluna Formulation
+    println("\e[41m get units usage \e[00m")
+    units_usage = Tuple{AbstractModel, Coluna.ColunaBase.UnitType, Coluna.ColunaBase.UnitAccessMode}[]
+    # TODO : the abstract model is KnapsackLibModel (opt.model)
+    return units_usage
+end
+
+function _scale_to_int(vals...)
+    return map(x -> Integer(round(10000x)), vals)
+end
+
+_getvarid(model::KnapsackLibModel, form, env::Env, j::Int) = Coluna.MathProg.getid(Coluna.MathProg.getvar(form, env.varids[model.job_to_jumpvar[j].index]))
+
+function Coluna.Algorithm.run!(
+    opt::KnapsackLibOptimizer, env::Coluna.Env, form::Coluna.MathProg.Formulation,
+    input::Coluna.Algorithm.OptimizationInput; kw...
+)
+    costs = -[Coluna.MathProg.getcurcost(form, _getvarid(opt.model, form, env, j)) for j in 1:length(opt.model.costs)]
+    ws = _scale_to_int(opt.model.capacity, opt.model.weights...)
+    cs = _scale_to_int(costs...)
+    items = [KnapItem(w,c) for (w,c) in zip(ws[2:end], cs)]
+    data = KnapData(ws[1], items)
+    _, selected = solveKnapExpCore(data)
+
+    # setup variable (issue https://github.com/atoptima/Coluna.jl/issues/283)
+    setup_var_id = [id for (id,v) in Iterators.filter(
+        v -> (
+            Coluna.MathProg.iscuractive(form, v.first) &&
+            Coluna.MathProg.isexplicit(form, v.first) &&
+            Coluna.MathProg.getduty(v.first) <= Coluna.DwSpSetupVar
+        ),
+        Coluna.MathProg.getvars(form)
+    )][1]
+
+    cost = sum(-costs[j] for j in selected) + Coluna.MathProg.getcurcost(form, setup_var_id)
+
+    varids = Coluna.MathProg.VarId[]
+    varvals = Float64[]
+
+    for j in selected
+        push!(varids, _getvarid(opt.model, form, env, j))
+        push!(varvals, 1)
+    end
+
+    push!(varids, setup_var_id)
+    push!(varvals, 1)
+
+    sol = Coluna.MathProg.PrimalSolution(form, varids, varvals, cost, Coluna.MathProg.FEASIBLE_SOL)
+
+    result = Coluna.Algorithm.OptimizationState(form; termination_status = Coluna.MathProg.OPTIMAL)
+    Coluna.Algorithm.add_ip_primal_sol!(result, sol)
+    dual_bound = Coluna.getvalue(Coluna.Algorithm.get_ip_primal_bound(result))
+    Coluna.Algorithm.set_ip_dual_bound!(result, Coluna.DualBound(form, dual_bound))
+    return Coluna.Algorithm.OptimizationOutput(result)
+end
+
+
+################################################################################
+# User model
+################################################################################
+function knpcustommodel()
+    @testset "knapsack custom model" begin
+        data = CLD.GeneralizedAssignment.data("play2.txt")
+        coluna = JuMP.optimizer_with_attributes(
+            Coluna.Optimizer,
+            "params" => CL.Params(solver = ClA.TreeSearchAlgorithm()),
+            "default_optimizer" => GLPK.Optimizer
+        )
+
+        model = BlockModel(coluna; direct_model = true)
+        @axis(M, data.machines)
+        @variable(model, x[m in M, j in data.jobs], Bin)
+        @constraint(model,
+            sp[j in data.jobs], sum(x[m,j] for m in data.machines) == 1
+        )
+        @objective(model, Min,
+            sum(data.cost[j,m]*x[m,j] for m in M, j in data.jobs)
+        )
+
+        @dantzig_wolfe_decomposition(model, dec, M)
+
+        sp = getsubproblems(dec)
+        for m in M
+            knp_model = KnapsackLibModel(length(data.jobs))
+            setcapacity!(knp_model, data.capacity[m])
+            for j in data.jobs
+                setweight!(knp_model, j, data.weight[j,m])
+                setcost!(knp_model, j, data.cost[j,m])
+                map!(knp_model, j, x[m,j])
+            end
+            knp_optimizer = KnapsackLibOptimizer(knp_model)
+            specify!(sp[m], solver = knp_optimizer) ##model = knp_model)
+        end
+
+        optimize!(model)
+
+        @test JuMP.objective_value(model) ≈ 75.0
+    end
+end
+
+knpcustommodel()
diff --git a/test/preprocessing_tests.jl b/test/preprocessing_tests.jl
index f9a28c899..00b5f708a 100644
--- a/test/preprocessing_tests.jl
+++ b/test/preprocessing_tests.jl
@@ -111,7 +111,7 @@ function test_random_gap_instance()
     nb_prep_vars = 0
     coluna_optimizer = problem.moi_backend
     master = CL.getmaster(coluna_optimizer.inner.re_formulation)
-    for (moi_index, varid) in coluna_optimizer.varids
+    for (moi_index, varid) in coluna_optimizer.env.varids
         var = CL.getvar(master, varid)
         if CL.getcurlb(master, var) == CL.getcurub(master, var)
            var_name = CL.getname(master, var)
diff --git a/test/runtests.jl b/test/runtests.jl
index e16190255..7ac7091cf 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -23,6 +23,7 @@ const ClA = Coluna.Algorithm
 
 include("unit/unit_tests.jl")
 include("MathOptInterface/MOI_wrapper.jl")
+include("interfaces/model.jl")
 include("issues_tests.jl")
 include("show_functions_tests.jl")
 include("full_instances_tests.jl")
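
For reference, the custom solver interface exercised by the test above boils down to four pieces: a user-side model type, an optimizer subtyping BlockDecomposition.AbstractCustomOptimizer, the two methods that Coluna calls directly on that optimizer (Coluna.Algorithm.run! and Coluna.Algorithm.get_units_usage; there is no intermediate CustomOptimize algorithm), and the attachment of the optimizer to a subproblem through specify!. The sketch below condenses that skeleton; it is illustrative only, the names MyKnapsackModel and MyKnapsackOptimizer are placeholders that are not part of this PR, and the run! body is stubbed (the full version is in test/interfaces/model.jl above).

# Condensed, hypothetical skeleton of a custom optimizer (not part of the diff).
using JuMP, BlockDecomposition, Coluna

mutable struct MyKnapsackModel <: Coluna.MathProg.AbstractFormulation
    profits::Vector{Float64}
    weights::Vector{Float64}
    capacity::Float64
    jumpvar_of_item::Dict{Int, JuMP.VariableRef}   # item -> variable of the compact JuMP model
end

struct MyKnapsackOptimizer <: BlockDecomposition.AbstractCustomOptimizer
    model::MyKnapsackModel
end

# Coluna dispatches these two methods straight to the custom optimizer.
function Coluna.Algorithm.get_units_usage(::MyKnapsackOptimizer, form)
    # Declare the storage units the solver reads or writes; none in this sketch.
    return Tuple{Coluna.ColunaBase.AbstractModel, Coluna.ColunaBase.UnitType, Coluna.ColunaBase.UnitAccessMode}[]
end

function Coluna.Algorithm.run!(
    opt::MyKnapsackOptimizer, env::Coluna.Env, form::Coluna.MathProg.Formulation,
    input::Coluna.Algorithm.OptimizationInput; kw...
)
    # Solve the subproblem with the external solver, translate JuMP variable
    # indices to Coluna VarIds through env.varids, build a PrimalSolution on
    # `form`, and return it inside an OptimizationOutput.
    result = Coluna.Algorithm.OptimizationState(
        form; termination_status = Coluna.MathProg.OPTIMAL
    )
    # A real implementation adds the solution here, e.g.:
    # Coluna.Algorithm.add_ip_primal_sol!(result, sol)
    return Coluna.Algorithm.OptimizationOutput(result)
end

# Attachment to a subproblem of the decomposition:
#   specify!(subproblem, solver = MyKnapsackOptimizer(knapsack_model))

The plumbing that makes this work is the new _optimizerbuilder method in src/decomposition.jl, which wraps any BlockDecomposition.AbstractCustomOptimizer passed to specify! into a MathProg.CustomOptimizer, and the new _optimizer_params method in solveipform.jl, which unwraps it with getinner so that SolveIpForm calls run! on the user's optimizer directly.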