Skip to content

Commit

Permalink
DNMY: experimental testing for fast resolves of NLP
Browse files Browse the repository at this point in the history
This is NOT safe to merge because it doesn't update the expression
graphs and so will break AmplNLWriter etc.
  • Loading branch information
odow committed Jul 14, 2022
1 parent e90c9d6 commit 88c9f48
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 1 deletion.
3 changes: 3 additions & 0 deletions src/JuMP.jl
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,8 @@ mutable struct Model <: AbstractModel
# A flag to track whether we have modified the model after calling
# optimize!.
is_model_dirty::Bool
# A flag to track whether we need to rebuild the NLP model before optimize!
is_nlp_model_dirty::Bool
# Enable extensions to attach arbitrary information to a JuMP model by
# using an extension-specific symbol as a key.
ext::Dict{Symbol,Any}
Expand Down Expand Up @@ -256,6 +258,7 @@ function direct_model(backend::MOI.ModelLike)
Dict{Symbol,Any}(),
0,
false,
true,
Dict{Symbol,Any}(),
true,
)
Expand Down
9 changes: 9 additions & 0 deletions src/nlp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ end
"""
    _init_NLP(model::Model)

Ensure `model.nlp_model` is initialized. If no nonlinear model exists yet,
create a fresh `MOI.Nonlinear.Model` and flag the NLP model as dirty so the
NLPBlock is rebuilt before the next solve.
"""
function _init_NLP(model::Model)
    if model.nlp_model !== nothing
        return  # Already initialized; nothing to do.
    end
    model.nlp_model = MOI.Nonlinear.Model()
    model.is_nlp_model_dirty = true
    return
end
Expand Down Expand Up @@ -161,6 +162,7 @@ function set_nonlinear_objective(model::Model, sense::MOI.OptimizationSense, x)
_init_NLP(model)
set_objective_sense(model, sense)
MOI.Nonlinear.set_objective(model.nlp_model, x)
model.is_nlp_model_dirty = true
return
end

Expand Down Expand Up @@ -206,6 +208,7 @@ end
"""
    add_nonlinear_parameter(model::Model, value::Real)

Add a nonlinear parameter with initial value `value` to `model`, and return
a `NonlinearParameter` reference to it.
"""
function add_nonlinear_parameter(model::Model, value::Real)
    _init_NLP(model)
    # Parameter values are stored as Float64 internally.
    param = MOI.Nonlinear.add_parameter(model.nlp_model, convert(Float64, value))
    # The NLPBlock must be rebuilt before the next solve.
    model.is_nlp_model_dirty = true
    return NonlinearParameter(model, param.value)
end

Expand Down Expand Up @@ -312,6 +315,7 @@ subexpression[1]: x + x ^ 2.0
"""
    add_nonlinear_expression(model::Model, ex)

Add the nonlinear expression `ex` to `model` as a subexpression, and return
a `NonlinearExpression` reference to it.
"""
function add_nonlinear_expression(model::Model, ex)
    _init_NLP(model)
    expr_index = MOI.Nonlinear.add_expression(model.nlp_model, ex)
    # Adding a subexpression invalidates the current NLPBlock.
    model.is_nlp_model_dirty = true
    return NonlinearExpression(model, expr_index.value)
end

Expand Down Expand Up @@ -439,6 +443,7 @@ function add_nonlinear_constraint(model::Model, ex::Expr)
_init_NLP(model)
f, set = _expr_to_constraint(ex)
c = MOI.Nonlinear.add_constraint(model.nlp_model, f, set)
model.is_nlp_model_dirty = true
return ConstraintRef(model, c, ScalarShape())
end

Expand All @@ -465,6 +470,7 @@ function delete(model::Model, c::NonlinearConstraintRef)
_init_NLP(model)
index = MOI.Nonlinear.ConstraintIndex(c.index.value)
MOI.Nonlinear.delete(model.nlp_model, index)
model.is_nlp_model_dirty = true
return
end

Expand Down Expand Up @@ -647,6 +653,7 @@ function register(
end
_init_NLP(model)
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f)
model.is_nlp_model_dirty = true
return
end

Expand Down Expand Up @@ -725,6 +732,7 @@ function register(
end
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f, ∇f)
end
model.is_nlp_model_dirty = true
return
end

Expand Down Expand Up @@ -780,6 +788,7 @@ function register(
end
_init_NLP(model)
MOI.Nonlinear.register_operator(model.nlp_model, op, dimension, f, ∇f, ∇²f)
model.is_nlp_model_dirty = true
return
end

Expand Down
4 changes: 3 additions & 1 deletion src/optimizer_interface.jl
Original file line number Diff line number Diff line change
Expand Up @@ -160,13 +160,15 @@ function optimize!(
)
# The nlp_model is not kept in sync, so re-set it here.
# TODO: Consider how to handle incremental solves.
if nonlinear_model(model) !== nothing
if nonlinear_model(model) !== nothing && model.is_nlp_model_dirty
@warn("Setting NLPBLock")
evaluator = MOI.Nonlinear.Evaluator(
nonlinear_model(model),
_differentiation_backend,
index.(all_variables(model)),
)
MOI.set(model, MOI.NLPBlock(), MOI.NLPBlockData(evaluator))
model.is_nlp_model_dirty = false
end
# If the user or an extension has provided an optimize hook, call
# that instead of solving the model ourselves
Expand Down
1 change: 1 addition & 0 deletions src/variables.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1062,6 +1062,7 @@ function add_variable end

function add_variable(model::Model, v::ScalarVariable, name::String = "")
    # A structural change invalidates both any cached solution and the
    # NLPBlock, which must be rebuilt before the next solve.
    model.is_nlp_model_dirty = true
    model.is_model_dirty = true
    return _moi_add_variable(backend(model), model, v, name)
end

Expand Down

0 comments on commit 88c9f48

Please sign in to comment.