Commit eca6a02

Error when new and old nonlinear APIs are mixed (#406)
odow authored Feb 28, 2024
1 parent 25301fa commit eca6a02
Showing 2 changed files with 36 additions and 15 deletions.
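
The commit guards against mixing MathOptInterface's two nonlinear interfaces in one model: the legacy MOI.NLPBlock / MOI.NLPBlockData evaluator API, and the newer MOI.Nonlinear-based API used for MOI.Parameter variables, MOI.ScalarNonlinearFunction constraints and objectives, and MOI.UserDefinedFunction. As context, here is a minimal sketch of the now-rejected interaction, adapted from the test added in this commit; it assumes Ipopt.jl and MathOptInterface are installed, and the variable names are illustrative only:

    using Ipopt
    import MathOptInterface as MOI

    model = Ipopt.Optimizer()
    # New API: the parameter is stored in an internal MOI.Nonlinear model.
    p, _ = MOI.add_constrained_variable(model, MOI.Parameter(2.0))
    # Legacy API: an NLPBlock built around an AbstractNLPEvaluator
    # (here the HS071 evaluator from MOI.Test, as in the new test below).
    bounds = MOI.NLPBoundsPair.([25.0, 40.0], [Inf, 40.0])
    block = MOI.NLPBlockData(bounds, MOI.Test.HS071(true), true)
    # After this commit the next call throws
    # ErrorException("Cannot mix the new and legacy nonlinear APIs").
    MOI.set(model, MOI.NLPBlock(), block)
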
33 changes: 18 additions & 15 deletions src/MOI_wrapper.jl
@@ -141,14 +141,22 @@ function MOI.supports_add_constrained_variable(
     return true
 end
 
+function _init_nlp_model(model)
+    if model.nlp_model === nothing
+        if !(model.nlp_data.evaluator isa _EmptyNLPEvaluator)
+            error("Cannot mix the new and legacy nonlinear APIs")
+        end
+        model.nlp_model = MOI.Nonlinear.Model()
+    end
+    return
+end
+
 function MOI.add_constrained_variable(
     model::Optimizer,
     set::MOI.Parameter{Float64},
 )
     model.inner = nothing
-    if model.nlp_model === nothing
-        model.nlp_model = MOI.Nonlinear.Model()
-    end
+    _init_nlp_model(model)
     p = MOI.VariableIndex(_PARAMETER_OFFSET + length(model.parameters))
     push!(model.list_of_variable_indices, p)
     model.parameters[p] =
@@ -442,9 +450,7 @@ function MOI.add_constraint(
     f::MOI.ScalarNonlinearFunction,
     s::_SETS,
 )
-    if model.nlp_model === nothing
-        model.nlp_model = MOI.Nonlinear.Model()
-    end
+    _init_nlp_model(model)
     if !isempty(model.parameters)
         _replace_parameters(model, f)
     end
@@ -458,9 +464,7 @@ function MOI.set(
     attr::MOI.ObjectiveFunction{MOI.ScalarNonlinearFunction},
     func::MOI.ScalarNonlinearFunction,
 )
-    if model.nlp_model === nothing
-        model.nlp_model = MOI.Nonlinear.Model()
-    end
+    _init_nlp_model(model)
     if !isempty(model.parameters)
         _replace_parameters(model, func)
     end
@@ -474,9 +478,7 @@ end
 MOI.supports(model::Optimizer, ::MOI.UserDefinedFunction) = true
 
 function MOI.set(model::Optimizer, attr::MOI.UserDefinedFunction, args)
-    if model.nlp_model === nothing
-        model.nlp_model = MOI.Nonlinear.Model()
-    end
+    _init_nlp_model(model)
     MOI.Nonlinear.register_operator(
         model.nlp_model,
         attr.name,
@@ -489,9 +491,7 @@ end
 ### ListOfSupportedNonlinearOperators
 
 function MOI.get(model::Optimizer, attr::MOI.ListOfSupportedNonlinearOperators)
-    if model.nlp_model === nothing
-        model.nlp_model = MOI.Nonlinear.Model()
-    end
+    _init_nlp_model(model)
    return MOI.get(model.nlp_model, attr)
 end
@@ -635,6 +635,9 @@ MOI.supports(::Optimizer, ::MOI.NLPBlock) = true
 MOI.get(model::Optimizer, ::MOI.NLPBlock) = model.nlp_data
 
 function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData)
+    if model.nlp_model !== nothing
+        error("Cannot mix the new and legacy nonlinear APIs")
+    end
     model.nlp_data = nlp_data
     model.inner = nothing
     return
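
The repeated three-line lazy-initialization blocks are now centralized in _init_nlp_model, which also refuses to create the new-API model once legacy NLPBlock data has been set. For reference only, here is a standalone sketch of that guard pattern, reduced so it runs outside Ipopt.jl; ToyModel, legacy_nlp_set, and init_nlp_model! are hypothetical stand-ins for the real Optimizer fields (legacy_nlp_set plays the role of the non-empty nlp_data.evaluator check):

    # Hypothetical stand-in for the wrapper: nlp_model holds new-API state,
    # legacy_nlp_set mirrors "nlp_data.evaluator is not an _EmptyNLPEvaluator".
    mutable struct ToyModel
        nlp_model::Union{Nothing,Dict{Symbol,Any}}
        legacy_nlp_set::Bool
    end

    # Lazily create the new-API storage, but error if the legacy API came first.
    function init_nlp_model!(model::ToyModel)
        if model.nlp_model === nothing
            if model.legacy_nlp_set
                error("Cannot mix the new and legacy nonlinear APIs")
            end
            model.nlp_model = Dict{Symbol,Any}()
        end
        return
    end

    m = ToyModel(nothing, false)
    init_nlp_model!(m)   # creates the new-API storage once
    init_nlp_model!(m)   # no-op on the second call
    try
        init_nlp_model!(ToyModel(nothing, true))
    catch err
        println(err)     # ErrorException("Cannot mix the new and legacy nonlinear APIs")
    end
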
18 changes: 18 additions & 0 deletions test/MOI_wrapper.jl
@@ -557,6 +557,24 @@ function test_ad_backend()
     return
 end
 
+function test_mixing_new_old_api()
+    # new then old
+    model = Ipopt.Optimizer()
+    MOI.add_constrained_variable(model, MOI.Parameter(2.0))
+    bounds = MOI.NLPBoundsPair.([25.0, 40.0], [Inf, 40.0])
+    block_data = MOI.NLPBlockData(bounds, MOI.Test.HS071(true), true)
+    err = ErrorException("Cannot mix the new and legacy nonlinear APIs")
+    @test_throws err MOI.set(model, MOI.NLPBlock(), block_data)
+    # old then new
+    model = Ipopt.Optimizer()
+    bounds = MOI.NLPBoundsPair.([25.0, 40.0], [Inf, 40.0])
+    block_data = MOI.NLPBlockData(bounds, MOI.Test.HS071(true), true)
+    MOI.set(model, MOI.NLPBlock(), block_data)
+    err = ErrorException("Cannot mix the new and legacy nonlinear APIs")
+    @test_throws err MOI.add_constrained_variable(model, MOI.Parameter(2.0))
+    return
+end
+
 end # module TestMOIWrapper
 
 TestMOIWrapper.runtests()
