Standardized solution type in SciMLBase #114


Merged · 5 commits · Feb 21, 2021
51 changes: 51 additions & 0 deletions .github/workflows/Downstream.yml
@@ -0,0 +1,51 @@
name: IntegrationTest
on:
push:
branches: [master]
tags: [v*]
pull_request:

jobs:
test:
name: ${{ matrix.package.repo }}/${{ matrix.package.group }}
runs-on: ${{ matrix.os }}
env:
GROUP: ${{ matrix.package.group }}
strategy:
fail-fast: false
matrix:
julia-version: [1]
os: [ubuntu-latest]
package:
- {user: SciML, repo: DiffEqFlux.jl, group: DiffEqFlux}
- {user: SciML, repo: NeuralPDE.jl, group: NNPDE}

steps:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@v1
with:
version: ${{ matrix.julia-version }}
arch: x64
- uses: julia-actions/julia-buildpkg@latest
- name: Clone Downstream
uses: actions/checkout@v2
with:
repository: ${{ matrix.package.user }}/${{ matrix.package.repo }}
path: downstream
- name: Load this and run the downstream tests
shell: julia --color=yes --project=downstream {0}
run: |
using Pkg
try
# force it to use this PR's version of the package
Pkg.develop(PackageSpec(path=".")) # resolver may fail with main deps
Pkg.update()
Pkg.test() # resolver may fail with test time deps
catch err
err isa Pkg.Resolve.ResolverError || rethrow()
# If we can't resolve, the packages are incompatible by SemVer and that is fine:
# it means this was marked as a breaking change, so we don't need to worry
# about mistakenly introducing one, as we have done so intentionally.
@info "Not compatible with this release. No problem." exception=err
exit(0) # Exit immediately, as a success
end
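
The inline julia step above is the heart of this workflow. Run locally, the same check looks roughly like the following — a minimal sketch, assuming the downstream package has already been cloned into ./downstream inside this repository's checkout:

using Pkg
Pkg.activate("downstream")                  # the cloned downstream project
try
    # force the downstream package to use this checkout's GalacticOptim
    Pkg.develop(PackageSpec(path = pwd()))
    Pkg.update()
    Pkg.test()                              # resolver may fail with test-time deps
catch err
    err isa Pkg.Resolve.ResolverError || rethrow()
    # SemVer-incompatible: the breaking release in Project.toml below is intentional
    @info "Not compatible with this release. No problem." exception = err
end
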
4 changes: 3 additions & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "GalacticOptim"
uuid = "a75be94c-b780-496d-a8a9-0878b188d577"
authors = ["Vaibhavdixit02 <[email protected]>"]
version = "0.4.7"
version = "1.0.0"

[deps]
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
@@ -20,6 +20,7 @@ ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
TerminalLoggers = "5d786b92-1e48-4d6f-9151-6b4477ca9bed"
Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
@@ -39,6 +40,7 @@ ProgressLogging = "0.1"
Reexport = "0.2, 1.0"
Requires = "1.0"
ReverseDiff = "1.4"
SciMLBase = "1.8.1"
TerminalLoggers = "0.1"
Tracker = "0.2"
Zygote = "0.5, 0.6"
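
Two things happen in Project.toml: the version jumps from 0.4.7 to 1.0.0, marking this as a breaking release, and a compat bound is added for the new SciMLBase dependency. Julia compat entries use caret semantics, so "1.8.1" admits any SciMLBase from 1.8.1 up to (but excluding) 2.0.0 — a quick sketch of that rule, using Pkg's internal (non-public) semver_spec helper:

using Pkg
spec = Pkg.Types.semver_spec("1.8.1")   # caret semantics: [1.8.1, 2.0.0)
@assert v"1.8.1" in spec
@assert v"1.99.0" in spec
@assert !(v"1.8.0" in spec)
@assert !(v"2.0.0" in spec)
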
3 changes: 2 additions & 1 deletion src/GalacticOptim.jl
@@ -2,14 +2,15 @@ module GalacticOptim

using Reexport
@reexport using DiffEqBase
@reexport using SciMLBase
using Requires
using DiffResults, ForwardDiff, Zygote, ReverseDiff, Tracker, FiniteDiff
@reexport using Optim, Flux
using Logging, ProgressLogging, Printf, ConsoleProgressMonitor, TerminalLoggers, LoggingExtras
using ArrayInterface, Base.Iterators

using ForwardDiff: DEFAULT_CHUNK_THRESHOLD
import DiffEqBase: OptimizationProblem, OptimizationFunction, AbstractADType
import SciMLBase: OptimizationProblem, OptimizationFunction, AbstractADType, __solve

import ModelingToolkit
import ModelingToolkit: AutoModelingToolkit
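The changed import line is the crux of the PR: OptimizationProblem, OptimizationFunction, and AbstractADType now come from SciMLBase rather than DiffEqBase, and __solve is imported so the package can add methods to SciMLBase's own solve hook. The dispatch pattern — a rough sketch, with MyOptimizer as a hypothetical stand-in rather than a type from this package — is that solve(prob, opt) forwards to whichever __solve method matches the optimizer type:

using SciMLBase
import SciMLBase: __solve

struct MyOptimizer end   # hypothetical optimizer tag

function __solve(prob::SciMLBase.OptimizationProblem, opt::MyOptimizer; kwargs...)
    θ = copy(prob.u0)                 # trivial "solver": keep the initial point
    obj = first(prob.f(θ, prob.p))    # objective value at θ
    SciMLBase.build_solution(prob, opt, θ, obj)
end
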
160 changes: 38 additions & 122 deletions src/solve.jl
@@ -1,34 +1,3 @@
abstract type AbstractOptimizationSolution end #experimental; comments welcome
mutable struct OptimizationSolution{O, Tx, Tf, Tls, Tsb} <: AbstractOptimizationSolution
method::O
initial_x::Tx
minimizer::Tx
minimum::Tf
iterations::Int
iteration_converged::Bool
ls_success::Tls
time_run::Float64
stopped_by::Tsb
end

function Base.show(io::IO, r::AbstractOptimizationSolution)
take = Iterators.take
failure_string = "failure"
if isa(r.ls_success, Bool) && !r.ls_success
failure_string *= " (line search failed)"
end

@printf io " * Status: %s\n\n" r.iteration_converged ? "success" : failure_string
@printf io " * Candidate solution\n"
fmt = " Final objective value: %e "*repeat(", %e ",length(r.minimum)-1)*"\n"
@eval @printf($io, $fmt, $r.minimum...)
#@printf io " Final objective value: %e\n" r.minimum
@printf io "\n"
@printf io " * Found with\n"
@printf io " Algorithm: %s\n" r.method
return
end

struct NullData end
const DEFAULT_DATA = Iterators.cycle((NullData(),))
Base.iterate(::NullData, i=1) = nothing
@@ -38,14 +7,6 @@ get_maxiters(data) = Iterators.IteratorSize(typeof(DEFAULT_DATA)) isa Iterators.
Iterators.IteratorSize(typeof(DEFAULT_DATA)) isa Iterators.SizeUnknown ?
typemax(Int) : length(data)

struct EnsembleOptimizationProblem
prob::Array{T, 1} where T<:OptimizationProblem
end

function DiffEqBase.solve(prob::Union{OptimizationProblem,EnsembleOptimizationProblem}, opt, args...;kwargs...)
__solve(prob, opt, args...; kwargs...)
end

#=
function update!(x::AbstractArray, x̄::AbstractArray{<:ForwardDiff.Dual})
x .-= x̄
@@ -159,15 +120,8 @@ function __solve(prob::OptimizationProblem, opt, data = DEFAULT_DATA;

_time = time()

OptimizationSolution(opt,
prob.u0,# initial_x,
θ, #pick_best_x(f_incr_pick, state),
save_best ? first(min_err) : first(x), # pick_best_f(f_incr_pick, state, d),
maxiters, #iteration,
maxiters >= maxiters, #iteration == options.iterations,
true,
_time-t0,
NamedTuple())
SciMLBase.build_solution(prob, opt, θ, x[1])
# TODO: build_solution should also construct the output message here
end
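
With SciMLBase.build_solution, every backend below now returns the same standardized OptimizationSolution instead of the hand-rolled struct deleted above. Assuming SciMLBase's field names (u for the minimizer, minimum for the objective value), downstream code reads it as follows — a sketch, not an exhaustive list of fields:

sol = solve(prob, opt)
sol.u          # the minimizer; replaces the old sol.minimizer
sol.minimum    # objective value at sol.u
sol.retcode    # standardized return code
sol.original   # the solver-native result, when original = ... was passed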


@@ -225,8 +179,16 @@ function __solve(prob::OptimizationProblem, opt::Optim.AbstractOptimizer,
optim_f = TwiceDifferentiable(_loss, (G, θ) -> f.grad(G, θ, cur...), fg!, (H,θ) -> f.hess(H,θ,cur...), prob.u0)
end

Optim.optimize(optim_f, prob.u0, opt, !(isnothing(maxiters)) ? Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...)
: Optim.Options(;extended_trace = true, callback = _cb, kwargs...))
original = Optim.optimize(optim_f, prob.u0, opt,
!(isnothing(maxiters)) ?
Optim.Options(;extended_trace = true,
callback = _cb,
iterations = maxiters,
kwargs...) :
Optim.Options(;extended_trace = true,
callback = _cb, kwargs...))
SciMLBase.build_solution(prob, opt, original.minimizer,
original.minimum; original=original)
end

function __solve(prob::OptimizationProblem, opt::Union{Optim.Fminbox,Optim.SAMIN},
@@ -276,8 +238,14 @@ function __solve(prob::OptimizationProblem, opt::Union{Optim.Fminbox,Optim.SAMIN
end
optim_f = OnceDifferentiable(_loss, f.grad, fg!, prob.u0)

Optim.optimize(optim_f, prob.lb, prob.ub, prob.u0, opt, !(isnothing(maxiters)) ? Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...)
: Optim.Options(;extended_trace = true, callback = _cb, kwargs...))
original = Optim.optimize(optim_f, prob.lb, prob.ub, prob.u0, opt,
!(isnothing(maxiters)) ? Optim.Options(;
extended_trace = true, callback = _cb,
iterations = maxiters, kwargs...) :
Optim.Options(;extended_trace = true,
callback = _cb, kwargs...))
SciMLBase.build_solution(prob, opt, original.minimizer,
original.minimum; original=original)
end


@@ -345,8 +313,14 @@ function __solve(prob::OptimizationProblem, opt::Optim.ConstrainedOptimizer,
ub = prob.ub === nothing ? [] : prob.ub
optim_fc = TwiceDifferentiableConstraints(cons!, cons_j!, cons_hl!, lb, ub, prob.lcons, prob.ucons)

Optim.optimize(optim_f, optim_fc, prob.u0, opt, !(isnothing(maxiters)) ? Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...)
: Optim.Options(;extended_trace = true, callback = _cb, kwargs...))
original = Optim.optimize(optim_f, optim_fc, prob.u0, opt,
!(isnothing(maxiters)) ? Optim.Options(;
extended_trace = true, callback = _cb,
iterations = maxiters, kwargs...) :
Optim.Options(;extended_trace = true,
callback = _cb, kwargs...))
SciMLBase.build_solution(prob, opt, original.minimizer,
original.minimum; original=original)
end


@@ -399,17 +373,8 @@ function __init__()

bboptre = !(isnothing(maxiters)) ? BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], MaxSteps = maxiters, CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...) : BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...)


OptimizationSolution(opt.method,
[NaN],# initial_x,
BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
BlackBoxOptim.best_fitness(bboptre), # pick_best_f(f_incr_pick, state, d),
bboptre.iterations, #iteration,
!(isnothing(maxiters)) ? bboptre.iterations >= maxiters : true, #iteration == options.iterations,
true,
bboptre.elapsed_time,
NamedTuple())

SciMLBase.build_solution(prob, opt, BlackBoxOptim.best_candidate(bboptre),
BlackBoxOptim.best_fitness(bboptre); original=bboptre)
end

function __solve(prob::EnsembleOptimizationProblem, opt::BBO, data = DEFAULT_DATA;
@@ -447,7 +412,7 @@ end
end

_loss = function(θ)
x = ntuple(i->first(prob.prob[i].f(θ, prob.prob[i].p, cur...)),length(prob.prob))
x = ntuple(i->first(prob.prob[i].f(θ, prob.prob[i].p, cur...)),length(prob.prob))
return x
end

@@ -464,17 +429,8 @@ function __init__()

bboptre = !(isnothing(maxiters)) ? BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(multi_bounds.lb[i], multi_bounds.ub[i]) for i in 1:length(multi_bounds.lb)], MaxSteps = maxiters, CallbackFunction = _cb, CallbackInterval = 0.0, FitnessScheme=FitnessScheme, kwargs...) : BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(multi_bounds.lb[i], multi_bounds.ub[i]) for i in 1:length(multi_bounds.lb)], CallbackFunction = _cb, CallbackInterval = 0.0, FitnessScheme=FitnessScheme, kwargs...)


OptimizationSolution(opt.method,
[NaN],# initial_x,
BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
BlackBoxOptim.best_fitness(bboptre), # pick_best_f(f_incr_pick, state, d),
bboptre.iterations, #iteration,
!(isnothing(maxiters)) ? bboptre.iterations >= maxiters : true, #iteration == options.iterations,
true,
bboptre.elapsed_time,
NamedTuple())

SciMLBase.build_solution(prob, opt, BlackBoxOptim.best_candidate(bboptre),
BlackBoxOptim.best_fitness(bboptre); original=bboptre)
end
end

@@ -528,15 +484,7 @@ function __init__()
(minf,minx,ret) = NLopt.optimize(opt, prob.u0)
_time = time()

OptimizationSolution(opt.algorithm,
prob.u0,# initial_x,
minx, #pick_best_x(f_incr_pick, state),
minf, # pick_best_f(f_incr_pick, state, d),
Int(opt.numevals), #iteration,
!(isnothing(maxiters)) ? opt.numevals >= maxiters : true, #iteration == options.iterations,
ret,
_time-t0,
NamedTuple())
SciMLBase.build_solution(prob, opt, minx, minf; original=nothing)
end
end

@@ -570,15 +518,7 @@ function __init__()

t1 = time()

OptimizationSolution(opt,
[NaN],# initial_x,
p.location, #pick_best_x(f_incr_pick, state),
p.value, # pick_best_f(f_incr_pick, state, d),
local_maxiters,
local_maxiters>=opt.maxeval, #not sure if that's correct
true,
t1 - t0,
NamedTuple())
SciMLBase.build_solution(prob, opt, p.location, p.value; original=p)
end
end

@@ -613,15 +553,7 @@ function __init__()
box = minimum(root)
t1 = time()

OptimizationSolution(opt,
[NaN],# initial_x,
QuadDIRECT.position(box, x0), #pick_best_x(f_incr_pick, state),
QuadDIRECT.value(box), # pick_best_f(f_incr_pick, state, d),
!(isnothing(maxiters)) ? maxiters : 0,
box.qnconverged, #not sure if that's correct
true,
t1 - t0,
NamedTuple())
SciMLBase.build_solution(prob, opt, QuadDIRECT.position(box, x0), QuadDIRECT.value(box); original=root)
end
end

@@ -669,15 +601,7 @@ function __init__()
: Evolutionary.Options(;callback = _cb, kwargs...))
t1 = time()

OptimizationSolution(summary(result),
prob.u0, #initial_x
Evolutionary.minimizer(result), #pick_best_x
minimum(result), #pick_best_f
Evolutionary.iterations(result), #iteration
Evolutionary.converged(result), #convergence status
true,
t1 - t0,
NamedTuple())
SciMLBase.build_solution(prob, opt, Evolutionary.minimizer(result), Evolutionary.minimum(result); original=result)
end
end
@require CMAEvolutionStrategy="8d3b24bd-414e-49e0-94fb-163cc3a3e411" begin
@@ -726,15 +650,7 @@ function __init__()
criterion = false
end

OptimizationSolution(opt,
prob.u0,# initial_x,
result.logger.xbest[end], #pick_best_x(f_incr_pick, state),
result.logger.fbest[end], # pick_best_f(f_incr_pick, state, d),
length(result.logger.fbest),
criterion,
true,
result.logger.times[end] - result.logger.times[1],
NamedTuple())
SciMLBase.build_solution(prob, opt, result.logger.xbest[end], result.logger.fbest[end]; original=result)
end
end
end
4 changes: 2 additions & 2 deletions test/diffeqfluxtests.jl
@@ -40,7 +40,7 @@ callback = function (p, l, pred)

# using `remake` to re-create our `prob` with current parameters `p`
remade_solution = solve(remake(prob_ode, p = p), Tsit5(), saveat = tsteps)

# Tell sciml_train not to halt the optimization. If the callback returns
# true, the optimization stops.
return false
@@ -105,7 +105,7 @@ result_neuralode = GalacticOptim.solve(prob,
ADAM(), cb = callback,
maxiters = 300)

prob2 = remake(prob,u0=result_neuralode.minimizer)
prob2 = remake(prob,u0=result_neuralode.u)
result_neuralode2 = GalacticOptim.solve(prob2,
BFGS(initial_stepnorm=0.0001),
cb = callback,
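This accessor rename is the one user-visible consequence of the new solution type: the minimizer now lives in sol.u. A minimal before/after, assuming a result from GalacticOptim.solve as in the test above:

result_neuralode = GalacticOptim.solve(prob, ADAM(), cb = callback, maxiters = 300)
prob2 = remake(prob, u0 = result_neuralode.minimizer)   # old field, pre-1.0
prob2 = remake(prob, u0 = result_neuralode.u)           # new standardized field
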
2 changes: 1 addition & 1 deletion test/rosenbrock.jl
@@ -68,7 +68,7 @@ cons_circ = (x,p) -> [x[1]^2 + x[2]^2]
optprob = OptimizationFunction(rosenbrock, GalacticOptim.AutoForwardDiff();cons= cons_circ)
prob = OptimizationProblem(optprob, x0, lcons = [-Inf], ucons = [0.25^2])
sol = solve(prob, IPNewton())
@test sqrt(cons(sol.minimizer,nothing)[1]) ≈ 0.25 rtol = 1e-6
@test sqrt(cons(sol.u,nothing)[1]) ≈ 0.25 rtol = 1e-6

optprob = OptimizationFunction(rosenbrock, GalacticOptim.AutoZygote())
prob = OptimizationProblem(optprob, x0)