Skip to content

Extensions for NLPModels.jl #99

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Apr 21, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 7 additions & 12 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,19 +3,14 @@ uuid = "ff4d7338-4cf1-434d-91df-b86cb86fb843"
version = "0.3.8"

[deps]
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"

[compat]
NLPModels = "0.15, 0.16, 0.17, 0.18, 0.19, 0.20, 0.21"
julia = "^1.6"
[weakdeps]
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"

[extras]
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
NLPModelsTest = "7998695d-6960-4d3a-85c4-e1bceb8cd856"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[extensions]
SolverCoreNLPModelsExt = "NLPModels"

[targets]
test = ["LinearAlgebra", "Logging", "NLPModelsTest", "Test"]
[compat]
NLPModels = "0.15, 0.16, 0.17, 0.18, 0.19, 0.20, 0.21"
julia = "^1.10"
113 changes: 113 additions & 0 deletions ext/SolverCoreNLPModelsExt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
module SolverCoreNLPModelsExt

# Package extension activated when NLPModels is loaded alongside SolverCore
# (declared under [weakdeps]/[extensions] in Project.toml).  It specializes
# SolverCore's generic API (`reset!`, `solve!`, `GenericExecutionStats`,
# `eval_fun`) for `AbstractNLPModel`s.

using SolverCore
using NLPModels:
  AbstractNLPModel,
  AbstractNLSModel,
  has_bounds,
  neval_cons,
  neval_obj,
  neval_residual,
  unconstrained

"""
    reset!(stats::GenericExecutionStats, nlp::AbstractNLPModel)

Reset the internal flags of `stats` to `false` to indicate
that the contents should not be trusted.
If an `AbstractNLPModel` is also provided,
the pre-allocated vectors are adjusted to the problem size.
"""
function SolverCore.reset!(
  stats::GenericExecutionStats{T, S},
  nlp::AbstractNLPModel{T, S},
) where {T, S}
  # Re-allocate solution/multiplier storage to match the problem dimensions.
  stats.solution = similar(nlp.meta.x0)
  stats.multipliers = similar(nlp.meta.y0)
  # Bound multipliers are empty vectors when the problem has no bound constraints.
  stats.multipliers_L = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0)
  stats.multipliers_U = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0)
  # Delegate to the one-argument method to clear the reliability flags.
  SolverCore.reset!(stats)
  stats
end

"""
    solve!(solver, model; kwargs...)
    solve!(solver, model, stats; kwargs...)

Apply `solver` to `model`.

# Arguments

- `solver::AbstractOptimizationSolver`: solver structure to hold all storage necessary for a solve
- `model::AbstractNLPModel`: the model solved, see `NLPModels.jl`
- `stats::GenericExecutionStats`: stats structure to hold solution information.

The first invocation allocates and returns a new `GenericExecutionStats`.
The second one fills out a preallocated stats structure and allows for efficient re-solves.

The `kwargs` are passed to the solver.

# Return Value

- `stats::GenericExecutionStats`: stats structure holding solution information.
"""
function SolverCore.solve!(
  solver::AOS,
  model::AbstractNLPModel{T, S};
  kwargs...,
) where {AOS <: AbstractOptimizationSolver, T, S}
  # Convenience method: allocate stats sized for `model`, then delegate to the
  # three-argument in-place method.
  stats = GenericExecutionStats(model)
  solve!(solver, model, stats; kwargs...)
end

# Fallback no-op defining the required three-argument signature; concrete
# solver packages are expected to provide their own specialization.
function SolverCore.solve!(
  ::AbstractOptimizationSolver,
  ::AbstractNLPModel,
  ::GenericExecutionStats;
  kwargs...,
) end

"""
    GenericExecutionStats(nlp::AbstractNLPModel; kwargs...)

Allocate a `GenericExecutionStats` whose vectors are sized for `nlp`:
`solution` mirrors `nlp.meta.x0`, `multipliers` mirrors `nlp.meta.y0`, and the
bound multipliers have length `nlp.meta.nvar` when `nlp` has bounds and 0
otherwise.  All fields can be overridden via keyword arguments.
"""
function SolverCore.GenericExecutionStats(
  nlp::AbstractNLPModel{T, S};
  status::Symbol = :unknown,
  solution::S = similar(nlp.meta.x0),
  objective::T = T(Inf),
  dual_feas::T = T(Inf),
  # An unconstrained problem is trivially primal feasible.
  primal_feas::T = unconstrained(nlp) ? zero(T) : T(Inf),
  multipliers::S = similar(nlp.meta.y0),
  multipliers_L::V = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0),
  multipliers_U::V = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0),
  iter::Int = -1,
  elapsed_time::Real = Inf,
  solver_specific::Dict{Symbol, Tsp} = Dict{Symbol, Any}(),
) where {T, S, V, Tsp}
  # Reject unknown status symbols early (see `show_statuses()` for valid ones).
  SolverCore.check_status(status)
  # Each `false` below is the `*_reliable` flag paired with the field that
  # follows it; a fresh stats object starts with nothing marked trustworthy.
  # NOTE(review): the positional order must match the inner constructor of
  # `GenericExecutionStats` exactly (`multipliers_L`/`multipliers_U` share one flag).
  return GenericExecutionStats{T, S, V, Tsp}(
    false,
    status,
    false,
    solution,
    false,
    objective,
    false,
    dual_feas,
    false,
    primal_feas,
    false,
    multipliers,
    false,
    multipliers_L,
    multipliers_U,
    false,
    iter,
    false,
    elapsed_time,
    false,
    solver_specific,
  )
end

# Evaluation counters used by stopping tests such as `max_eval` in
# `get_status`: objective (or residual, for least-squares models) plus
# constraint evaluations.
SolverCore.eval_fun(nlp::AbstractNLPModel) = neval_obj(nlp) + neval_cons(nlp)
SolverCore.eval_fun(nls::AbstractNLSModel) = neval_residual(nls) + neval_cons(nls)

end # end of module
17 changes: 0 additions & 17 deletions src/SolverCore.jl
Original file line number Diff line number Diff line change
@@ -1,26 +1,9 @@
module SolverCore

using LinearAlgebra: LinearAlgebra, Symmetric, factorize, ldiv!, mul!, norm, qr
using NLPModels:
NLPModels,
AbstractNLPModel,
AbstractNLSModel,
cons!,
grad!,
has_bounds,
hess_coord!,
jac_coord!,
neval_cons,
neval_obj,
neval_residual,
obj,
reset!,
unconstrained
using Printf: Printf, @printf, @sprintf

include("logger.jl")
include("stats.jl")
include("solver.jl")
include("dummy_solver.jl")

end
25 changes: 9 additions & 16 deletions src/solver.jl
Original file line number Diff line number Diff line change
@@ -1,18 +1,20 @@
export AbstractSolver, AbstractOptimizationSolver, solve!
export AbstractSolver, AbstractOptimizationSolver, solve!, reset!

"Abstract type from which JSO solvers derive."
abstract type AbstractSolver end

abstract type AbstractOptimizationSolver <: AbstractSolver end

"""
reset!(solver::AbstractOptimizationSolver, model::AbstractNLPModel)
reset!(solver::AbstractSolver, model)
reset!(solver::AbstractSolver)

Use in the context of restarting or reusing the `solver` structure.
Reset the internal fields of `solver` for the `model` before calling `solve!` on the same structure.
`model` must have the same number of variables, bounds and constraints as that used to instantiate `solver`.
"""
function NLPModels.reset!(::AbstractOptimizationSolver, ::AbstractNLPModel) end
function reset!(solver::AbstractSolver) end
function reset!(solver::AbstractSolver, model) end

"""
solve!(solver, model; kwargs...)
Expand All @@ -22,8 +24,8 @@ Apply `solver` to `model`.

# Arguments

- `solver::AbstractOptimizationSolver`: solver structure to hold all storage necessary for a solve
- `model::AbstractNLPModel`: the model solved, see `NLPModels.jl`
- `solver::AbstractSolver`: solver structure to hold all storage necessary for a solve
- `model`: the model solved
- `stats::GenericExecutionStats`: stats structure to hold solution information.

The first invocation allocates and returns a new `GenericExecutionStats`.
Expand All @@ -35,14 +37,5 @@ The `kwargs` are passed to the solver.

- `stats::GenericExecutionStats`: stats structure holding solution information.
"""
function solve!(solver::AbstractOptimizationSolver, model::AbstractNLPModel; kwargs...)
stats = GenericExecutionStats(model)
solve!(solver, model, stats; kwargs...)
end

function solve!(
::AbstractOptimizationSolver,
::AbstractNLPModel,
::GenericExecutionStats;
kwargs...,
) end
function solve!(solver::AbstractSolver, model; kwargs...) end
function solve!(solver::AbstractSolver, model, stats; kwargs...) end
68 changes: 9 additions & 59 deletions src/stats.jl
Original file line number Diff line number Diff line change
Expand Up @@ -170,56 +170,14 @@ function GenericExecutionStats{T, S, V, Tsp}(;
)
end

function GenericExecutionStats(
nlp::AbstractNLPModel{T, S};
status::Symbol = :unknown,
solution::S = similar(nlp.meta.x0),
objective::T = T(Inf),
dual_feas::T = T(Inf),
primal_feas::T = unconstrained(nlp) ? zero(T) : T(Inf),
multipliers::S = similar(nlp.meta.y0),
multipliers_L::V = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0),
multipliers_U::V = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0),
iter::Int = -1,
elapsed_time::Real = Inf,
solver_specific::Dict{Symbol, Tsp} = Dict{Symbol, Any}(),
) where {T, S, V, Tsp}
check_status(status)
return GenericExecutionStats{T, S, V, Tsp}(
false,
status,
false,
solution,
false,
objective,
false,
dual_feas,
false,
primal_feas,
false,
multipliers,
false,
multipliers_L,
multipliers_U,
false,
iter,
false,
elapsed_time,
false,
solver_specific,
)
end

"""
reset!(stats::GenericExecutionStats)
reset!(stats::GenericExecutionStats, nlp::AbstractNLPModel)
reset!(stats::GenericExecutionStats, problem)

Reset the internal flags of `stats` to `false` to indicate
that the contents should not be trusted.
If an `AbstractNLPModel` is also provided,
the pre-allocated vectors are adjusted to the problem size.
"""
function NLPModels.reset!(stats::GenericExecutionStats)
function reset!(stats::GenericExecutionStats{T, S, V, Tsp}) where {T, S, V, Tsp}
stats.status_reliable = false
stats.solution_reliable = false
stats.objective_reliable = false
Expand All @@ -233,16 +191,8 @@ function NLPModels.reset!(stats::GenericExecutionStats)
stats
end

function NLPModels.reset!(
stats::GenericExecutionStats{T, S},
nlp::AbstractNLPModel{T, S},
) where {T, S}
stats.solution = similar(nlp.meta.x0)
stats.multipliers = similar(nlp.meta.y0)
stats.multipliers_L = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0)
stats.multipliers_U = similar(nlp.meta.y0, has_bounds(nlp) ? nlp.meta.nvar : 0)
reset!(stats)
stats
function reset!(stats::GenericExecutionStats, problem::Any)
return reset!(stats)
end

"""
Expand Down Expand Up @@ -508,7 +458,7 @@ function getStatus(stats::AbstractExecutionStats)
end

"""
get_status(nlp, kwargs...)
get_status(problem, kwargs...)

Return the output of the solver based on the information in the keyword arguments.
Use `show_statuses()` for the full list.
Expand All @@ -525,9 +475,11 @@ The keyword arguments may contain:
- `max_eval::Integer`: limit on the number of evaluations defined by `eval_fun` (default: `typemax(Int)`);
- `max_time::Float64 = Inf`: limit on the time (default: `Inf`);
- `max_iter::Integer`: limit on the number of iterations (default: `typemax(Int)`).

The `problem` argument is used to check the number of evaluations via `SolverCore.eval_fun(problem)`.
"""
function get_status(
nlp::AbstractNLPModel;
nlp;
elapsed_time::Float64 = 0.0,
iter::Integer = 0,
optimal::Bool = false,
Expand Down Expand Up @@ -565,6 +517,4 @@ function get_status(
:unknown
end
end

eval_fun(nlp::AbstractNLPModel) = neval_obj(nlp) + neval_cons(nlp)
eval_fun(nls::AbstractNLSModel) = neval_residual(nls) + neval_cons(nls)
eval_fun(::Any) = typemax(Int)
2 changes: 1 addition & 1 deletion src/dummy_solver.jl → test/dummy-solver.jl
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ function dummy_solver(
solve!(solver, nlp, stats, args...; kwargs...)
end

function solve!(
function SolverCore.solve!(
solver::DummySolver{S},
nlp::AbstractNLPModel{T, S},
stats::GenericExecutionStats;
Expand Down
2 changes: 2 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ using NLPModels, NLPModelsTest
# stdlib
using LinearAlgebra, Logging

include("dummy-solver.jl")

#=
Don't add your tests to runtests.jl. Instead, create files named

Expand Down
2 changes: 1 addition & 1 deletion test/test-callback.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@
set_status!(stats, :user)
end
end
stats = SolverCore.dummy_solver(nlp, max_eval = 20, callback = callback)
stats = dummy_solver(nlp, max_eval = 20, callback = callback)
@test stats.iter == 3
end
2 changes: 1 addition & 1 deletion test/test-logging.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ function test_logging()

with_logger(ConsoleLogger()) do
@info "Testing dummy solver with logger"
SolverCore.dummy_solver(nlp, max_eval = 20)
dummy_solver(nlp, max_eval = 20)
end
end

Expand Down
6 changes: 3 additions & 3 deletions test/test-restart.jl
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
@testset "test restart" begin
nlp = HS10()
solver = SolverCore.DummySolver(nlp)
solver = DummySolver(nlp)
stats = GenericExecutionStats(nlp)
solve!(solver, nlp, stats, verbose = false)
@test stats.status == :first_order
# Try with a new intial guess
nlp.meta.x0 .= 0.2
reset!(solver, nlp)
SolverCore.reset!(solver, nlp)
solve!(solver, nlp, stats, verbose = false)
@test stats.status == :first_order
# Try with a new problem of the same size
nlp = HS10()
reset!(solver, nlp)
SolverCore.reset!(solver, nlp)
solve!(solver, nlp, stats, verbose = false)
@test stats.status == :first_order
end
Loading
Loading