Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for MOI.ScalarNonlinearFunction #239

Merged
merged 5 commits into from
Aug 21, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
129 changes: 127 additions & 2 deletions ext/NLoptMathOptInterfaceExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
variables::MOI.Utilities.VariablesContainer{Float64}
starting_values::Vector{Union{Nothing,Float64}}
nlp_data::MOI.NLPBlockData
nlp_model::Union{Nothing,MOI.Nonlinear.Model}
ad_backend::MOI.Nonlinear.AbstractAutomaticDifferentiation
sense::Union{Nothing,MOI.OptimizationSense}
objective::Union{
MOI.VariableIndex,
Expand Down Expand Up @@ -75,6 +77,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
Union{Nothing,Float64}[],
MOI.NLPBlockData([], _EmptyNLPEvaluator(), false),
nothing,
MOI.Nonlinear.SparseReverseMode(),
nothing,
nothing,
_ConstraintInfo{
MOI.ScalarAffineFunction{Float64},
Expand Down Expand Up @@ -115,6 +119,7 @@ function MOI.empty!(model::Optimizer)
MOI.empty!(model.variables)
empty!(model.starting_values)
model.nlp_data = MOI.NLPBlockData([], _EmptyNLPEvaluator(), false)
model.nlp_model = nothing
model.sense = nothing
model.objective = nothing
empty!(model.linear_le_constraints)
Expand All @@ -129,6 +134,7 @@ function MOI.is_empty(model::Optimizer)
return MOI.is_empty(model.variables) &&
isempty(model.starting_values) &&
model.nlp_data.evaluator isa _EmptyNLPEvaluator &&
model.nlp_model === nothing &&
model.sense == nothing &&
isempty(model.linear_le_constraints) &&
isempty(model.linear_eq_constraints) &&
Expand Down Expand Up @@ -449,7 +455,13 @@ end
# A linear/quadratic constraint index is valid iff its value falls within the
# number of constraints currently stored for that function/set combination.
function MOI.is_valid(
    model::Optimizer,
    ci::MOI.ConstraintIndex{F,S},
) where {
    F<:Union{
        MOI.ScalarAffineFunction{Float64},
        MOI.ScalarQuadraticFunction{Float64},
    },
    S<:Union{MOI.LessThan{Float64},MOI.EqualTo{Float64}},
}
    n = length(_constraints(model, F, S))
    return ci.value in 1:n
end

Expand Down Expand Up @@ -527,6 +539,9 @@ end
# The legacy nonlinear API (MOI.NLPBlock) is supported.
function MOI.supports(::Optimizer, ::MOI.NLPBlock)
    return true
end

function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData)
    # The legacy NLPBlock API cannot be combined with the newer
    # ScalarNonlinearFunction API, which populates `model.nlp_model`.
    model.nlp_model === nothing ||
        error("Cannot mix the new and legacy nonlinear APIs")
    model.nlp_data = nlp_data
    return
end
Expand All @@ -546,7 +561,12 @@ function MOI.supports(
return true
end

MOI.get(model::Optimizer, ::MOI.ObjectiveFunctionType) = typeof(model.objective)
function MOI.get(model::Optimizer, ::MOI.ObjectiveFunctionType)
    # A nonlinear objective stored in `nlp_model` takes precedence over any
    # scalar objective stored in `model.objective`.
    nlp = model.nlp_model
    if nlp !== nothing && nlp.objective !== nothing
        return MOI.ScalarNonlinearFunction
    end
    return typeof(model.objective)
end

function MOI.get(model::Optimizer, ::MOI.ObjectiveFunction{F}) where {F}
return convert(F, model.objective)::F
Expand All @@ -565,9 +585,108 @@ function MOI.set(
}
_check_inbounds(model, func)
model.objective = func
if model.nlp_model !== nothing
MOI.Nonlinear.set_objective(model.nlp_model, nothing)
end
return
end

# ScalarNonlinearFunction

# Lazily create `model.nlp_model`, erroring if the legacy NLPBlock API has
# already been used (a non-empty evaluator in `model.nlp_data`).
function _init_nlp_model(model)
    model.nlp_model === nothing || return
    if !(model.nlp_data.evaluator isa _EmptyNLPEvaluator)
        error("Cannot mix the new and legacy nonlinear APIs")
    end
    model.nlp_model = MOI.Nonlinear.Model()
    return
end

# A ScalarNonlinearFunction constraint index is valid iff the nonlinear model
# exists and recognizes the corresponding MOI.Nonlinear.ConstraintIndex.
function MOI.is_valid(
    model::Optimizer,
    ci::MOI.ConstraintIndex{MOI.ScalarNonlinearFunction,S},
) where {
    S<:Union{
        MOI.EqualTo{Float64},
        MOI.LessThan{Float64},
        MOI.GreaterThan{Float64},
        MOI.Interval{Float64},
    },
}
    model.nlp_model === nothing && return false
    return MOI.is_valid(model.nlp_model, MOI.Nonlinear.ConstraintIndex(ci.value))
end

# Nonlinear constraints are accepted in all four scalar sets; they are routed
# through `model.nlp_model` rather than the affine/quadratic containers.
function MOI.supports_constraint(
    ::Optimizer,
    ::Type{MOI.ScalarNonlinearFunction},
    ::Type{
        <:Union{
            MOI.EqualTo{Float64},
            MOI.LessThan{Float64},
            MOI.GreaterThan{Float64},
            MOI.Interval{Float64},
        },
    },
)
    return true
end

# Store the nonlinear constraint in `model.nlp_model` and wrap the resulting
# internal index in an MOI.ConstraintIndex of the matching type.
function MOI.add_constraint(
    model::Optimizer,
    func::MOI.ScalarNonlinearFunction,
    set::Union{
        MOI.EqualTo{Float64},
        MOI.LessThan{Float64},
        MOI.GreaterThan{Float64},
        MOI.Interval{Float64},
    },
)
    _init_nlp_model(model)
    inner = MOI.Nonlinear.add_constraint(model.nlp_model, func, set)
    return MOI.ConstraintIndex{typeof(func),typeof(set)}(inner.value)
end

# Nonlinear objectives are supported; they are stored in `model.nlp_model`.
MOI.supports(::Optimizer, ::MOI.ObjectiveFunction{MOI.ScalarNonlinearFunction}) = true

function MOI.set(
    model::Optimizer,
    ::MOI.ObjectiveFunction{MOI.ScalarNonlinearFunction},
    func::MOI.ScalarNonlinearFunction,
)
    # NOTE(review): `model.objective` is not cleared here; the getter checks
    # `nlp_model.objective` first, which appears to make the nonlinear
    # objective take precedence — confirm against the optimize! path.
    _init_nlp_model(model)
    MOI.Nonlinear.set_objective(model.nlp_model, func)
    return
end

### MOI.AutomaticDifferentiationBackend

# The AD backend used to build the MOI.Nonlinear.Evaluator is configurable.
function MOI.supports(::Optimizer, ::MOI.AutomaticDifferentiationBackend)
    return true
end

# Return the currently configured automatic-differentiation backend.
MOI.get(model::Optimizer, ::MOI.AutomaticDifferentiationBackend) = model.ad_backend

# Replace the automatic-differentiation backend used for nonlinear functions.
function MOI.set(
    model::Optimizer,
    ::MOI.AutomaticDifferentiationBackend,
    ad::MOI.Nonlinear.AbstractAutomaticDifferentiation,
)
    model.ad_backend = ad
    return
end

# optimize!

function _fill_gradient(grad, x, f::MOI.VariableIndex)
grad[f.value] = 1.0
return
Expand Down Expand Up @@ -704,6 +823,12 @@ function MOI.optimize!(model::Optimizer)
num_variables = length(model.starting_values)
model.inner = NLopt.Opt(model.options["algorithm"], num_variables)
_initialize_options!(model)
if model.nlp_model !== nothing
vars = MOI.VariableIndex.(1:num_variables)
model.nlp_data = MOI.NLPBlockData(
MOI.Nonlinear.Evaluator(model.nlp_model, model.ad_backend, vars),
)
end
NLopt.lower_bounds!(model.inner, model.variables.lower)
NLopt.upper_bounds!(model.inner, model.variables.upper)
nonlinear_equality_indices = findall(
Expand Down
127 changes: 127 additions & 0 deletions test/MOI_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,10 @@ function test_runtests()
# Perhaps an expected failure because the problem is non-convex
r"^test_quadratic_nonconvex_constraint_basic$",
r"^test_quadratic_nonconvex_constraint_integration$",
# A whole bunch of issues to diagnose here
"test_basic_VectorNonlinearFunction_",
# INVALID_OPTION?
r"^test_nonlinear_expression_hs109$",
other_failures...,
],
)
Expand Down Expand Up @@ -162,6 +166,129 @@ function test_get_objective_function()
return
end

function test_ScalarNonlinearFunction_mix_apis_nlpblock_last()
    # Adding a ScalarNonlinearFunction constraint first must prevent the
    # legacy NLPBlock API from being used afterwards.
    model = NLopt.Optimizer()
    x = MOI.add_variable(model)
    MOI.add_constraint(
        model,
        MOI.ScalarNonlinearFunction(:log, Any[x]),
        MOI.LessThan(1.0),
    )
    block = MOI.NLPBlockData(
        MOI.NLPBoundsPair.([25.0, 40.0], [Inf, 40.0]),
        MOI.Test.HS071(false, false),
        true,
    )
    err = ErrorException("Cannot mix the new and legacy nonlinear APIs")
    @test_throws err MOI.set(model, MOI.NLPBlock(), block)
    return
end

function test_ScalarNonlinearFunction_mix_apis_nlpblock_first()
    # Setting a legacy NLPBlock first must prevent ScalarNonlinearFunction
    # constraints from being added afterwards.
    model = NLopt.Optimizer()
    x = MOI.add_variable(model)
    block = MOI.NLPBlockData(
        MOI.NLPBoundsPair.([25.0, 40.0], [Inf, 40.0]),
        MOI.Test.HS071(false, false),
        true,
    )
    MOI.set(model, MOI.NLPBlock(), block)
    err = ErrorException("Cannot mix the new and legacy nonlinear APIs")
    @test_throws err MOI.add_constraint(
        model,
        MOI.ScalarNonlinearFunction(:log, Any[x]),
        MOI.LessThan(1.0),
    )
    return
end

function test_ScalarNonlinearFunction_is_valid()
    model = NLopt.Optimizer()
    x = MOI.add_variable(model)
    F, S = MOI.ScalarNonlinearFunction, MOI.EqualTo{Float64}
    # No nlp_model exists yet, so any nonlinear constraint index is invalid.
    # (Idiom: `@test !x` / `@test x` instead of comparing Bools with `==`.)
    @test !MOI.is_valid(model, MOI.ConstraintIndex{F,S}(1))
    f = MOI.ScalarNonlinearFunction(:sin, Any[x])
    c = MOI.add_constraint(model, f, MOI.EqualTo(0.0))
    @test c isa MOI.ConstraintIndex{F,S}
    @test MOI.is_valid(model, c)
    return
end

function test_ScalarNonlinearFunction_ObjectiveFunctionType()
    # Setting a nonlinear objective must be reflected by ObjectiveFunctionType.
    model = NLopt.Optimizer()
    x = MOI.add_variable(model)
    MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE)
    F = MOI.ScalarNonlinearFunction
    obj = MOI.ScalarNonlinearFunction(:log, Any[x])
    MOI.set(model, MOI.ObjectiveFunction{F}(), obj)
    @test MOI.get(model, MOI.ObjectiveFunctionType()) == F
    return
end

function test_AutomaticDifferentiationBackend()
    # Default backend is SparseReverseMode; it must be settable and gettable.
    attr = MOI.AutomaticDifferentiationBackend()
    model = NLopt.Optimizer()
    @test MOI.supports(model, attr)
    @test MOI.get(model, attr) == MOI.Nonlinear.SparseReverseMode()
    MOI.set(model, attr, MOI.Nonlinear.ExprGraphOnly())
    @test MOI.get(model, attr) == MOI.Nonlinear.ExprGraphOnly()
    return
end

function test_ScalarNonlinearFunction_LessThan()
    # max x  s.t. log(x) <= 2  has solution x = e^2.
    opt = NLopt.Optimizer()
    MOI.set(opt, MOI.RawOptimizerAttribute("algorithm"), :LD_SLSQP)
    x = MOI.add_variable(opt)
    # A finite starting point is needed; see NLopt#31.
    MOI.set(opt, MOI.VariablePrimalStart(), x, 1.0)
    MOI.add_constraint(
        opt,
        MOI.ScalarNonlinearFunction(:log, Any[x]),
        MOI.LessThan(2.0),
    )
    MOI.set(opt, MOI.ObjectiveSense(), MOI.MAX_SENSE)
    MOI.set(opt, MOI.ObjectiveFunction{MOI.VariableIndex}(), x)
    MOI.optimize!(opt)
    x_star = MOI.get(opt, MOI.VariablePrimal(), x)
    @test isapprox(x_star, exp(2); atol = 1e-4)
    return
end

function test_ScalarNonlinearFunction_GreaterThan()
    # min x  s.t. log(x) >= 2  has solution x = e^2.
    opt = NLopt.Optimizer()
    MOI.set(opt, MOI.RawOptimizerAttribute("algorithm"), :LD_SLSQP)
    x = MOI.add_variable(opt)
    # A finite starting point is needed; see NLopt#31.
    MOI.set(opt, MOI.VariablePrimalStart(), x, 1.0)
    MOI.add_constraint(
        opt,
        MOI.ScalarNonlinearFunction(:log, Any[x]),
        MOI.GreaterThan(2.0),
    )
    MOI.set(opt, MOI.ObjectiveSense(), MOI.MIN_SENSE)
    MOI.set(opt, MOI.ObjectiveFunction{MOI.VariableIndex}(), x)
    MOI.optimize!(opt)
    x_star = MOI.get(opt, MOI.VariablePrimal(), x)
    @test isapprox(x_star, exp(2); atol = 1e-4)
    return
end

function test_ScalarNonlinearFunction_Interval()
    # max x  s.t. 1 <= log(x) <= 2  has solution x = e^2.
    opt = NLopt.Optimizer()
    MOI.set(opt, MOI.RawOptimizerAttribute("algorithm"), :LD_SLSQP)
    x = MOI.add_variable(opt)
    # A finite starting point is needed; see NLopt#31.
    MOI.set(opt, MOI.VariablePrimalStart(), x, 1.0)
    MOI.add_constraint(
        opt,
        MOI.ScalarNonlinearFunction(:log, Any[x]),
        MOI.Interval(1.0, 2.0),
    )
    MOI.set(opt, MOI.ObjectiveSense(), MOI.MAX_SENSE)
    MOI.set(opt, MOI.ObjectiveFunction{MOI.VariableIndex}(), x)
    MOI.optimize!(opt)
    x_star = MOI.get(opt, MOI.VariablePrimal(), x)
    @test isapprox(x_star, exp(2); atol = 1e-4)
    return
end

function test_ScalarNonlinearFunction_derivative_free()
    # Same model as the GreaterThan test, but with a derivative-free
    # algorithm (COBYLA) instead of a gradient-based one.
    opt = NLopt.Optimizer()
    MOI.set(opt, MOI.RawOptimizerAttribute("algorithm"), :LN_COBYLA)
    x = MOI.add_variable(opt)
    # A finite starting point is needed; see NLopt#31.
    MOI.set(opt, MOI.VariablePrimalStart(), x, 1.0)
    MOI.add_constraint(
        opt,
        MOI.ScalarNonlinearFunction(:log, Any[x]),
        MOI.GreaterThan(2.0),
    )
    MOI.set(opt, MOI.ObjectiveSense(), MOI.MIN_SENSE)
    MOI.set(opt, MOI.ObjectiveFunction{MOI.VariableIndex}(), x)
    MOI.optimize!(opt)
    x_star = MOI.get(opt, MOI.VariablePrimal(), x)
    @test isapprox(x_star, exp(2); atol = 1e-4)
    return
end

end # module

TestMOIWrapper.runtests()
Loading