diff --git a/docs/src/reference/models.md b/docs/src/reference/models.md index 76f9a9154b..5a35d03cb3 100644 --- a/docs/src/reference/models.md +++ b/docs/src/reference/models.md @@ -37,6 +37,7 @@ copy_to ```@docs AbstractModelAttribute Name +CoefficientType ObjectiveFunction ObjectiveFunctionType ObjectiveSense diff --git a/src/Bridges/bridge_optimizer.jl b/src/Bridges/bridge_optimizer.jl index 1d14256596..5a445d8076 100644 --- a/src/Bridges/bridge_optimizer.jl +++ b/src/Bridges/bridge_optimizer.jl @@ -803,6 +803,10 @@ function MOI.get(b::AbstractBridgeOptimizer, attr::MOI.ListOfModelAttributesSet) return unbridged_function(b, list) end +function MOI.get(b::AbstractBridgeOptimizer, attr::MOI.CoefficientType) + return MOI.get(b.model, attr) +end + function MOI.get( b::AbstractBridgeOptimizer, attr::Union{MOI.AbstractModelAttribute,MOI.AbstractOptimizerAttribute}, diff --git a/src/Utilities/cachingoptimizer.jl b/src/Utilities/cachingoptimizer.jl index 47cbe6fa83..864722fc2e 100644 --- a/src/Utilities/cachingoptimizer.jl +++ b/src/Utilities/cachingoptimizer.jl @@ -18,19 +18,21 @@ and links it with an optimizer. It supports incremental model construction and modification even when the optimizer doesn't. -A `CachingOptimizer` may be in one of three possible states (`CachingOptimizerState`): +A `CachingOptimizer` may be in one of three possible states +(`CachingOptimizerState`): * `NO_OPTIMIZER`: The CachingOptimizer does not have any optimizer. * `EMPTY_OPTIMIZER`: The CachingOptimizer has an empty optimizer. The optimizer is not synchronized with the cached model. -* `ATTACHED_OPTIMIZER`: The CachingOptimizer has an optimizer, and it is synchronized with the cached model. +* `ATTACHED_OPTIMIZER`: The CachingOptimizer has an optimizer, and it is + synchronized with the cached model. 
A `CachingOptimizer` has two modes of operation (`CachingOptimizerMode`): * `MANUAL`: The only methods that change the state of the `CachingOptimizer` are [`Utilities.reset_optimizer`](@ref), [`Utilities.drop_optimizer`](@ref), - and [`Utilities.attach_optimizer`](@ref). - Attempting to perform an operation in the incorrect state results in an error. + and [`Utilities.attach_optimizer`](@ref). Attempting to perform an operation + in the incorrect state results in an error. * `AUTOMATIC`: The `CachingOptimizer` changes its state when necessary. For example, `optimize!` will automatically call `attach_optimizer` (an optimizer must have been previously set). Attempting to add a constraint or @@ -45,25 +47,82 @@ mutable struct CachingOptimizer{OptimizerType,ModelType<:MOI.ModelLike} <: mode::CachingOptimizerMode model_to_optimizer_map::IndexMap optimizer_to_model_map::IndexMap - # CachingOptimizer externally uses the same variable and constraint indices - # as the model_cache. model_to_optimizer_map maps from the model_cache indices to the - # optimizer indices. + auto_bridge::Bool end +""" + CachingOptimizer( + model_cache::MOI.ModelLike, + optimizer::Union{Nothing,MOI.AbstractOptimizer} = nothing; + mode::CachingOptimizerMode = AUTOMATIC, + state::CachingOptimizerState = + optimizer === nothing ? NO_OPTIMIZER : EMPTY_OPTIMIZER, + auto_bridge::Bool = false, + ) + +Creates a `CachingOptimizer` using `model_cache` and `optimizer`. + +## Notes + + * If `auto_bridge == true`, when the caching optimizer encounters a constraint + or objective function that is not supported by `optimizer`, it automatically + adds a bridging layer to `optimizer`. + * If `auto_bridge == true`, and an optimizer is provided, the state is forced + to `EMPTY_OPTIMIZER`. + * If an `optimizer` is passed, the returned CachingOptimizer does not support + the function `reset_optimizer(model, new_optimizer)` if the type of + `new_optimizer` is different from the type of `optimizer`. 
+ +## Examples + +```julia +model = MOI.Utilities.CachingOptimizer( + MOI.Utilities.Model{Float64}(), + GLPK.Optimizer(), +) +``` + +```julia +model = MOI.Utilities.CachingOptimizer( + MOI.Utilities.Model{Float64}(), + auto_bridge = true, +) +MOI.Utilities.reset_optimizer(model, GLPK.Optimizer()) +``` +""" function CachingOptimizer( model_cache::MOI.ModelLike, - mode::CachingOptimizerMode, + optimizer::Union{Nothing,MOI.AbstractOptimizer} = nothing; + mode::CachingOptimizerMode = AUTOMATIC, + state::CachingOptimizerState = optimizer === nothing ? NO_OPTIMIZER : + EMPTY_OPTIMIZER, + auto_bridge::Bool = false, ) - return CachingOptimizer{MOI.AbstractOptimizer,typeof(model_cache)}( - nothing, + T = optimizer !== nothing ? typeof(optimizer) : MOI.AbstractOptimizer + if optimizer !== nothing + @assert MOI.is_empty(model_cache) + @assert MOI.is_empty(optimizer) + if auto_bridge + state = EMPTY_OPTIMIZER + T = MOI.AbstractOptimizer + end + end + return CachingOptimizer{T,typeof(model_cache)}( + optimizer, model_cache, - NO_OPTIMIZER, + state, mode, IndexMap(), IndexMap(), + auto_bridge, ) end +# Added for compatibility with MOI 0.9.20 +function CachingOptimizer(cache::MOI.ModelLike, mode::CachingOptimizerMode) + return CachingOptimizer(cache; mode = mode) +end + function Base.show(io::IO, C::CachingOptimizer) indent = " "^get(io, :indent, 0) MOIU.print_with_acronym(io, summary(C)) @@ -75,31 +134,12 @@ function Base.show(io::IO, C::CachingOptimizer) return show(IOContext(io, :indent => get(io, :indent, 0) + 2), C.optimizer) end -""" - CachingOptimizer(model_cache::MOI.ModelLike, optimizer::AbstractOptimizer) - -Creates an `CachingOptimizer` in `AUTOMATIC` mode, with the optimizer -`optimizer`. - -The type of the optimizer returned is `CachingOptimizer{typeof(optimizer), -typeof(model_cache)}` so it does not support the function -`reset_optimizer(::CachingOptimizer, new_optimizer)` if the type of -`new_optimizer` is different from the type of `optimizer`. 
-""" -function CachingOptimizer( - model_cache::MOI.ModelLike, - optimizer::MOI.AbstractOptimizer, -) - @assert MOI.is_empty(model_cache) - @assert MOI.is_empty(optimizer) - return CachingOptimizer{typeof(optimizer),typeof(model_cache)}( - optimizer, - model_cache, - EMPTY_OPTIMIZER, - AUTOMATIC, - IndexMap(), - IndexMap(), - ) +function MOI.get(model::CachingOptimizer, attr::MOI.CoefficientType) + if state(model) == NO_OPTIMIZER + return MOI.get(model.model_cache, attr) + else + return MOI.get(model.optimizer, attr) + end end ## Methods for managing the state of CachingOptimizer. @@ -137,8 +177,8 @@ function reset_optimizer(m::CachingOptimizer, optimizer::MOI.AbstractOptimizer) if attr isa MOI.RawOptimizerAttribute # Even if the optimizer claims to `supports` `attr`, the value # might have a different meaning (e.g., two solvers with `logLevel` - # as a RawOptimizerAttribute). To be on the safe side, just skip all raw - # parameters. + # as a RawOptimizerAttribute). To be on the safe side, just skip all + # raw parameters. continue elseif !MOI.is_copyable(attr) || !MOI.supports(m.optimizer, attr)::Bool continue @@ -344,19 +384,70 @@ function MOI.add_variables(m::CachingOptimizer, n) return vindices end +""" + _bridge_if_needed( + f::Function, + m::CachingOptimizer; + add::Bool = false, + ) + +Return `f(m)`, under the assumption that the `.optimizer` field of `m` will be +wrapped in a `LazyBridgeOptimizer` if `f(m)` is currently false, and that doing +so would allow `f(m) == true`. However, only modify the `.optimizer` field if +`add == true`. + +`f` is a function that takes `m` as a single argument. It is typically a call +like `f(m) = MOI.supports_constraint(m, F, S)` for some `F` and `S`. +""" +function _bridge_if_needed( + f::Function, + model::CachingOptimizer; + add::Bool = false, +) + if !f(model.model_cache) + # If the cache doesn't, we don't. + return false + elseif model.state == NO_OPTIMIZER + # The cache does, and there is no optimizer, so we do. 
+ return true + elseif f(model.optimizer) + # There is an optimizer, and it does. + return true + elseif !model.auto_bridge + # There is an optimizer, it doesn't, and we aren't bridging. + return false + end + reset_optimizer(model) + T = MOI.get(model, MOI.CoefficientType()) + bridge = MOI.instantiate(model.optimizer; with_bridge_type = T) + if f(bridge) + if add + model.optimizer = bridge + end + return true # We bridged, and now we support. + end + return false # Everything fails. +end + function MOI.supports_add_constrained_variable( m::CachingOptimizer, S::Type{<:MOI.AbstractScalarSet}, ) - return MOI.supports_add_constrained_variable(m.model_cache, S) && ( - m.state == NO_OPTIMIZER || - MOI.supports_add_constrained_variable(m.optimizer, S)::Bool - ) + return _bridge_if_needed(m) do model + return MOI.supports_add_constrained_variable(model, S) + end end + function MOI.add_constrained_variable( m::CachingOptimizer, set::S, ) where {S<:MOI.AbstractScalarSet} + supports = _bridge_if_needed(m; add = true) do model + return MOI.supports_add_constrained_variable(model, S) + end + if !supports && state(m) == ATTACHED_OPTIMIZER + throw(MOI.UnsupportedConstraint{MOI.SingleVariable,S}()) + end if m.state == MOIU.ATTACHED_OPTIMIZER if m.mode == MOIU.AUTOMATIC try @@ -399,10 +490,9 @@ function _supports_add_constrained_variables( m::CachingOptimizer, S::Type{<:MOI.AbstractVectorSet}, ) - return MOI.supports_add_constrained_variables(m.model_cache, S) && ( - m.state == NO_OPTIMIZER || - MOI.supports_add_constrained_variables(m.optimizer, S)::Bool - ) + return _bridge_if_needed(m) do model + return MOI.supports_add_constrained_variables(model, S) + end end # Split in two to solve ambiguity @@ -424,6 +514,12 @@ function MOI.add_constrained_variables( m::CachingOptimizer, set::S, ) where {S<:MOI.AbstractVectorSet} + supports = _bridge_if_needed(m; add = true) do model + return MOI.supports_add_constrained_variables(model, S) + end + if !supports && state(m) == 
ATTACHED_OPTIMIZER + throw(MOI.UnsupportedConstraint{MOI.VectorOfVariables,S}()) + end if m.state == ATTACHED_OPTIMIZER if m.mode == AUTOMATIC try @@ -470,10 +566,9 @@ function MOI.supports_constraint( F::Type{<:MOI.AbstractFunction}, S::Type{<:MOI.AbstractSet}, ) - return MOI.supports_constraint(m.model_cache, F, S) && ( - m.state == NO_OPTIMIZER || - MOI.supports_constraint(m.optimizer, F, S)::Bool - ) + return _bridge_if_needed(m) do model + return MOI.supports_constraint(model, F, S) + end end function MOI.add_constraint( @@ -481,6 +576,12 @@ function MOI.add_constraint( func::F, set::S, ) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet} + supports = _bridge_if_needed(m; add = true) do model + return MOI.supports_constraint(model, F, S) + end + if !supports && state(m) == ATTACHED_OPTIMIZER + throw(MOI.UnsupportedConstraint{F,S}()) + end if m.state == ATTACHED_OPTIMIZER if m.mode == AUTOMATIC try @@ -710,6 +811,32 @@ end # they are sent to the optimizer and when they are returned from the optimizer. # As a result, values of attributes must implement `map_indices`. 
+function MOI.set(m::CachingOptimizer, attr::MOI.ObjectiveFunction, value) + supports = _bridge_if_needed(m; add = true) do model + return MOI.supports(model, attr) + end + if !supports && state(m) == ATTACHED_OPTIMIZER + throw(MOI.UnsupportedAttribute(attr)) + end + if m.state == ATTACHED_OPTIMIZER + optimizer_value = map_indices(m.model_to_optimizer_map, value) + if m.mode == AUTOMATIC + try + MOI.set(m.optimizer, attr, optimizer_value) + catch err + if err isa MOI.NotAllowedError + reset_optimizer(m) + else + rethrow(err) + end + end + else + MOI.set(m.optimizer, attr, optimizer_value) + end + end + return MOI.set(m.model_cache, attr, value) +end + function MOI.set(m::CachingOptimizer, attr::MOI.AbstractModelAttribute, value) if m.state == ATTACHED_OPTIMIZER optimizer_value = map_indices(m.model_to_optimizer_map, value) @@ -777,6 +904,12 @@ function MOI.supports( (m.state == NO_OPTIMIZER || MOI.supports(m.optimizer, attr)::Bool) end +function MOI.supports(m::CachingOptimizer, attr::MOI.ObjectiveFunction) + return _bridge_if_needed(m) do model + return MOI.supports(model, attr) + end +end + function MOI.get(model::CachingOptimizer, attr::MOI.AbstractModelAttribute) if MOI.is_set_by_optimize(attr) if state(model) == NO_OPTIMIZER diff --git a/src/Utilities/mockoptimizer.jl b/src/Utilities/mockoptimizer.jl index f1cd4572b4..f5746dc135 100644 --- a/src/Utilities/mockoptimizer.jl +++ b/src/Utilities/mockoptimizer.jl @@ -335,6 +335,10 @@ function MOI.get(mock::MockOptimizer, attr::MOI.AbstractModelAttribute) end end +function MOI.get(mock::MockOptimizer, attr::MOI.CoefficientType) + return MOI.get(mock.inner_model, attr) +end + ##### ##### Names ##### diff --git a/src/Utilities/model.jl b/src/Utilities/model.jl index 614a83af7a..889e311d44 100644 --- a/src/Utilities/model.jl +++ b/src/Utilities/model.jl @@ -5,6 +5,8 @@ abstract type AbstractModelLike{T} <: MOI.ModelLike end abstract type AbstractOptimizer{T} <: MOI.AbstractOptimizer end const AbstractModel{T} = 
Union{AbstractModelLike{T},AbstractOptimizer{T}} +MOI.get(::AbstractModel{T}, ::MOI.CoefficientType) where {T} = T + # Variables function MOI.get(model::AbstractModel, ::MOI.NumberOfVariables)::Int64 if model.variable_indices === nothing diff --git a/src/attributes.jl b/src/attributes.jl index 8036b90217..40fb9ac6f7 100644 --- a/src/attributes.jl +++ b/src/attributes.jl @@ -921,6 +921,17 @@ attr = MOI.get(model, MOI.ObjectiveFunctionType()) """ struct ObjectiveFunctionType <: AbstractModelAttribute end +""" + CoefficientType() + +Return the coefficient type of a model. + +Defaults to `Float64`. +""" +struct CoefficientType <: AbstractModelAttribute end + +get(::ModelLike, ::CoefficientType) = Float64 + ## Optimizer attributes """ diff --git a/src/instantiate.jl b/src/instantiate.jl index 91ae1e71ea..8ad4d7b83a 100644 --- a/src/instantiate.jl +++ b/src/instantiate.jl @@ -92,6 +92,13 @@ function _instantiate_and_check(optimizer_constructor::OptimizerWithAttributes) return optimizer end +function _instantiate_and_check(optimizer::AbstractOptimizer) + if !is_empty(optimizer) + error("The provided `optimizer_constructor` is a non-empty optimizer.") + end + return optimizer +end + """ instantiate( optimizer_constructor, @@ -130,3 +137,6 @@ function instantiate( end return Bridges.full_bridge_optimizer(optimizer, with_bridge_type) end + +# Add a fallback so we don't add bridges on-top-of bridges! +instantiate(optimizer::Bridges.LazyBridgeOptimizer; kwargs...) 
= optimizer diff --git a/test/Utilities/cachingoptimizer.jl b/test/Utilities/cachingoptimizer.jl index cc838036a4..8fc61edcf2 100644 --- a/test/Utilities/cachingoptimizer.jl +++ b/test/Utilities/cachingoptimizer.jl @@ -694,3 +694,176 @@ end @test MOI.get(model, MOI.TerminationStatus()) == MOI.OPTIMAL @test MOI.get(model, MOI.PrimalStatus()) == MOI.FEASIBLE_POINT end + +MOI.Utilities.@model( + CachingAutoBridge, + (), + (MOI.GreaterThan, MOI.LessThan), + (), + (), + (), + (MOI.ScalarAffineFunction,), + (), + (), + true, +) +function MOI.supports( + ::CachingAutoBridge, + ::MOI.ObjectiveFunction{MOI.SingleVariable}, +) + return false +end + +@testset "auto-bridge-true-constraints" begin + model = MOIU.CachingOptimizer( + MOIU.Model{Float64}(), + CachingAutoBridge{Float64}(); + auto_bridge = true, + ) + @test MOIU.state(model) == MOIU.EMPTY_OPTIMIZER + x = MOI.add_variables(model, 2) + @test model.optimizer isa CachingAutoBridge{Float64} + # Supports: + @test MOI.supports_constraint( + model, + MOI.ScalarAffineFunction{Float64}, + MOI.LessThan{Float64}, + ) + @test model.optimizer isa CachingAutoBridge{Float64} + MOI.add_constraint( + model, + MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(1.0, x), 0.0), + MOI.LessThan(1.0), + ) + @test model.optimizer isa CachingAutoBridge{Float64} + # Doesn't support: + @test MOI.supports_constraint( + model, + MOI.VectorOfVariables, + MOI.Nonnegatives, + ) + @test model.optimizer isa CachingAutoBridge{Float64} + MOI.add_constraint(model, MOI.VectorOfVariables(x), MOI.Nonnegatives(2)) + @test model.optimizer isa + MOI.Bridges.LazyBridgeOptimizer{CachingAutoBridge{Float64}} +end + +@testset "auto-bridge-true-constraints-fails" begin + model = MOIU.CachingOptimizer( + MOIU.Model{Float64}(), + CachingAutoBridge{Float64}(); + auto_bridge = true, + ) + @test MOIU.state(model) == MOIU.EMPTY_OPTIMIZER + x = MOI.add_variables(model, 3) + @test !MOI.supports_constraint( + model, + MOI.VectorOfVariables, + MOI.SecondOrderCone, + ) + @test 
model.optimizer isa CachingAutoBridge{Float64} + MOI.add_constraint(model, MOI.VectorOfVariables(x), MOI.SecondOrderCone(3)) + @test model.optimizer isa CachingAutoBridge{Float64} + @test_throws( + MOI.UnsupportedConstraint, + MOI.Utilities.attach_optimizer(model), + ) +end + +@testset "auto-bridge-true-add_constrained_variables" begin + model = MOIU.CachingOptimizer( + MOIU.Model{Float64}(), + CachingAutoBridge{Float64}(); + auto_bridge = true, + ) + @test MOIU.state(model) == MOIU.EMPTY_OPTIMIZER + @test model.optimizer isa CachingAutoBridge{Float64} + # Supports: + x, cx = MOI.add_constrained_variable(model, MOI.GreaterThan(0.0)) + @test model.optimizer isa CachingAutoBridge{Float64} + # Doesn't support: + @test MOI.supports_add_constrained_variables(model, MOI.Nonnegatives) + @test model.optimizer isa CachingAutoBridge{Float64} + y, cy = MOI.add_constrained_variables(model, MOI.Nonnegatives(2)) + @test model.optimizer isa + MOI.Bridges.LazyBridgeOptimizer{CachingAutoBridge{Float64}} +end + +@testset "auto-bridge-true-objective" begin + model = MOIU.CachingOptimizer( + MOIU.Model{Float64}(), + CachingAutoBridge{Float64}(); + auto_bridge = true, + ) + @test MOIU.state(model) == MOIU.EMPTY_OPTIMIZER + x = MOI.add_variable(model) + @test model.optimizer isa CachingAutoBridge{Float64} + # Supports: + f = MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(1.0, x)], 0.0) + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + @test model.optimizer isa CachingAutoBridge{Float64} + # Doesn't support: + f = MOI.SingleVariable(x) + @test MOI.supports(model, MOI.ObjectiveFunction{typeof(f)}()) + @test model.optimizer isa CachingAutoBridge{Float64} + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + @test model.optimizer isa + MOI.Bridges.LazyBridgeOptimizer{CachingAutoBridge{Float64}} +end + +@testset "auto-bridge-false" begin + model = MOIU.CachingOptimizer( + MOIU.Model{Float64}(), + CachingAutoBridge{Float64}(), + ) + @test MOIU.state(model) == MOIU.EMPTY_OPTIMIZER 
+ MOIU.attach_optimizer(model) + x = MOI.add_variables(model, 2) + @test model.optimizer isa CachingAutoBridge{Float64} + # Constrained variables + @test_throws( + MOI.UnsupportedConstraint, + MOI.add_constrained_variables(model, MOI.Nonnegatives(2)), + ) + # Constraints + @test_throws( + MOI.UnsupportedConstraint, + MOI.add_constraint( + model, + MOI.VectorOfVariables(x), + MOI.Nonnegatives(2), + ), + ) + # Objective functions + f = MOI.SingleVariable(x[1]) + @test_throws( + MOI.UnsupportedAttribute, + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f), + ) +end + +@testset "auto-bridge-true-constraints-already-bridged" begin + optimizer = + MOI.Bridges.full_bridge_optimizer(CachingAutoBridge{Float64}(), Float64) + model = MOIU.CachingOptimizer( + MOIU.Model{Float64}(), + optimizer; + auto_bridge = true, + ) + @test MOIU.state(model) == MOIU.EMPTY_OPTIMIZER + x = MOI.add_variables(model, 2) + @test model.optimizer isa + MOI.Bridges.LazyBridgeOptimizer{CachingAutoBridge{Float64}} + # Supports: + MOI.add_constraint( + model, + MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(1.0, x), 0.0), + MOI.LessThan(1.0), + ) + @test model.optimizer isa + MOI.Bridges.LazyBridgeOptimizer{CachingAutoBridge{Float64}} + # Doesn't support: + MOI.add_constraint(model, MOI.VectorOfVariables(x), MOI.Nonnegatives(2)) + @test model.optimizer isa + MOI.Bridges.LazyBridgeOptimizer{CachingAutoBridge{Float64}} +end diff --git a/test/attributes.jl b/test/attributes.jl index 124cb612eb..dcf285ca61 100644 --- a/test/attributes.jl +++ b/test/attributes.jl @@ -140,6 +140,22 @@ function test_no_constraint_name() ) end +function test_coefficient_type() + model = MOI.Utilities.Model{Int}() + @test MOI.get(model, MOI.CoefficientType()) == Int + mock = MOI.Utilities.MockOptimizer(model) + @test MOI.get(mock, MOI.CoefficientType()) == Int + bridge = MOI.Bridges.full_bridge_optimizer(mock, Int) + @test MOI.get(bridge, MOI.CoefficientType()) == Int + cache = MOI.Utilities.CachingOptimizer( + 
MOI.Utilities.Model{Float64}(), + MOI.Utilities.AUTOMATIC, + ) + @test MOI.get(cache, MOI.CoefficientType()) == Float64 + MOI.Utilities.reset_optimizer(cache, bridge) + @test MOI.get(cache, MOI.CoefficientType()) == Int +end + function runtests() for name in names(@__MODULE__; all = true) if startswith("$name", "test_")