Skip to content

Commit 1b90250

Browse files
Merge pull request #114 from SciML/solution
Standardized solution type in SciMLBase
2 parents f435dfe + bd3dc30 commit 1b90250

File tree

6 files changed

+97
-127
lines changed

6 files changed

+97
-127
lines changed

.github/workflows/Downstream.yml

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
name: IntegrationTest
2+
on:
3+
push:
4+
branches: [master]
5+
tags: [v*]
6+
pull_request:
7+
8+
jobs:
9+
test:
10+
name: ${{ matrix.package.repo }}/${{ matrix.package.group }}
11+
runs-on: ${{ matrix.os }}
12+
env:
13+
GROUP: ${{ matrix.package.group }}
14+
strategy:
15+
fail-fast: false
16+
matrix:
17+
julia-version: [1]
18+
os: [ubuntu-latest]
19+
package:
20+
- {user: SciML, repo: DiffEqFlux.jl, group: DiffEqFlux}
21+
- {user: SciML, repo: NeuralPDE.jl, group: NNPDE}
22+
23+
steps:
24+
- uses: actions/checkout@v2
25+
- uses: julia-actions/setup-julia@v1
26+
with:
27+
version: ${{ matrix.julia-version }}
28+
arch: x64
29+
- uses: julia-actions/julia-buildpkg@latest
30+
- name: Clone Downstream
31+
uses: actions/checkout@v2
32+
with:
33+
repository: ${{ matrix.package.user }}/${{ matrix.package.repo }}
34+
path: downstream
35+
- name: Load this and run the downstream tests
36+
shell: julia --color=yes --project=downstream {0}
37+
run: |
38+
using Pkg
39+
try
40+
# force it to use this PR's version of the package
41+
Pkg.develop(PackageSpec(path=".")) # resolver may fail with main deps
42+
Pkg.update()
43+
Pkg.test() # resolver may fail with test time deps
44+
catch err
45+
err isa Pkg.Resolve.ResolverError || rethrow()
46+
# If we can't resolve that means this is incompatible by SemVer and this is fine
47+
# It means we marked this as a breaking change, so we don't need to worry about
48+
# Mistakenly introducing a breaking change, as we have intentionally made one
49+
@info "Not compatible with this release. No problem." exception=err
50+
exit(0) # Exit immediately, as a success
51+
end

Project.toml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "GalacticOptim"
22
uuid = "a75be94c-b780-496d-a8a9-0878b188d577"
33
authors = ["Vaibhavdixit02 <[email protected]>"]
4-
version = "0.4.7"
4+
version = "1.0.0"
55

66
[deps]
77
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
@@ -20,6 +20,7 @@ ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
2020
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
2121
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
2222
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
23+
SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
2324
TerminalLoggers = "5d786b92-1e48-4d6f-9151-6b4477ca9bed"
2425
Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
2526
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
@@ -39,6 +40,7 @@ ProgressLogging = "0.1"
3940
Reexport = "0.2, 1.0"
4041
Requires = "1.0"
4142
ReverseDiff = "1.4"
43+
SciMLBase = "1.8.1"
4244
TerminalLoggers = "0.1"
4345
Tracker = "0.2"
4446
Zygote = "0.5, 0.6"

src/GalacticOptim.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,15 @@ module GalacticOptim
22

33
using Reexport
44
@reexport using DiffEqBase
5+
@reexport using SciMLBase
56
using Requires
67
using DiffResults, ForwardDiff, Zygote, ReverseDiff, Tracker, FiniteDiff
78
@reexport using Optim, Flux
89
using Logging, ProgressLogging, Printf, ConsoleProgressMonitor, TerminalLoggers, LoggingExtras
910
using ArrayInterface, Base.Iterators
1011

1112
using ForwardDiff: DEFAULT_CHUNK_THRESHOLD
12-
import DiffEqBase: OptimizationProblem, OptimizationFunction, AbstractADType
13+
import SciMLBase: OptimizationProblem, OptimizationFunction, AbstractADType, __solve
1314

1415
import ModelingToolkit
1516
import ModelingToolkit: AutoModelingToolkit

src/solve.jl

Lines changed: 38 additions & 122 deletions
Original file line numberDiff line numberDiff line change
@@ -1,34 +1,3 @@
1-
abstract type AbstractOptimizationSolution end #experimental; comments welcome
2-
mutable struct OptimizationSolution{O, Tx, Tf, Tls, Tsb} <: AbstractOptimizationSolution
3-
method::O
4-
initial_x::Tx
5-
minimizer::Tx
6-
minimum::Tf
7-
iterations::Int
8-
iteration_converged::Bool
9-
ls_success::Tls
10-
time_run::Float64
11-
stopped_by::Tsb
12-
end
13-
14-
function Base.show(io::IO, r::AbstractOptimizationSolution)
15-
take = Iterators.take
16-
failure_string = "failure"
17-
if isa(r.ls_success, Bool) && !r.ls_success
18-
failure_string *= " (line search failed)"
19-
end
20-
21-
@printf io " * Status: %s\n\n" r.iteration_converged ? "success" : failure_string
22-
@printf io " * Candidate solution\n"
23-
fmt = " Final objective value: %e "*repeat(", %e ",length(r.minimum)-1)*"\n"
24-
@eval @printf($io, $fmt, $r.minimum...)
25-
#@printf io " Final objective value: %e\n" r.minimum
26-
@printf io "\n"
27-
@printf io " * Found with\n"
28-
@printf io " Algorithm: %s\n" r.method
29-
return
30-
end
31-
321
struct NullData end
332
const DEFAULT_DATA = Iterators.cycle((NullData(),))
343
Base.iterate(::NullData, i=1) = nothing
@@ -38,14 +7,6 @@ get_maxiters(data) = Iterators.IteratorSize(typeof(DEFAULT_DATA)) isa Iterators.
387
Iterators.IteratorSize(typeof(DEFAULT_DATA)) isa Iterators.SizeUnknown ?
398
typemax(Int) : length(data)
409

41-
struct EnsembleOptimizationProblem
42-
prob::Array{T, 1} where T<:OptimizationProblem
43-
end
44-
45-
function DiffEqBase.solve(prob::Union{OptimizationProblem,EnsembleOptimizationProblem}, opt, args...;kwargs...)
46-
__solve(prob, opt, args...; kwargs...)
47-
end
48-
4910
#=
5011
function update!(x::AbstractArray, x̄::AbstractArray{<:ForwardDiff.Dual})
5112
x .-= x̄
@@ -159,15 +120,8 @@ function __solve(prob::OptimizationProblem, opt, data = DEFAULT_DATA;
159120

160121
_time = time()
161122

162-
OptimizationSolution(opt,
163-
prob.u0,# initial_x,
164-
θ, #pick_best_x(f_incr_pick, state),
165-
save_best ? first(min_err) : first(x), # pick_best_f(f_incr_pick, state, d),
166-
maxiters, #iteration,
167-
maxiters >= maxiters, #iteration == options.iterations,
168-
true,
169-
_time-t0,
170-
NamedTuple())
123+
SciMLBase.build_solution(prob, opt, θ, x[1])
124+
# here should be build_solution to create the output message
171125
end
172126

173127

@@ -225,8 +179,16 @@ function __solve(prob::OptimizationProblem, opt::Optim.AbstractOptimizer,
225179
optim_f = TwiceDifferentiable(_loss, (G, θ) -> f.grad(G, θ, cur...), fg!, (H,θ) -> f.hess(H,θ,cur...), prob.u0)
226180
end
227181

228-
Optim.optimize(optim_f, prob.u0, opt, !(isnothing(maxiters)) ? Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...)
229-
: Optim.Options(;extended_trace = true, callback = _cb, kwargs...))
182+
original = Optim.optimize(optim_f, prob.u0, opt,
183+
!(isnothing(maxiters)) ?
184+
Optim.Options(;extended_trace = true,
185+
callback = _cb,
186+
iterations = maxiters,
187+
kwargs...) :
188+
Optim.Options(;extended_trace = true,
189+
callback = _cb, kwargs...))
190+
SciMLBase.build_solution(prob, opt, original.minimizer,
191+
original.minimum; original=original)
230192
end
231193

232194
function __solve(prob::OptimizationProblem, opt::Union{Optim.Fminbox,Optim.SAMIN},
@@ -276,8 +238,14 @@ function __solve(prob::OptimizationProblem, opt::Union{Optim.Fminbox,Optim.SAMIN
276238
end
277239
optim_f = OnceDifferentiable(_loss, f.grad, fg!, prob.u0)
278240

279-
Optim.optimize(optim_f, prob.lb, prob.ub, prob.u0, opt, !(isnothing(maxiters)) ? Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...)
280-
: Optim.Options(;extended_trace = true, callback = _cb, kwargs...))
241+
original = Optim.optimize(optim_f, prob.lb, prob.ub, prob.u0, opt,
242+
!(isnothing(maxiters)) ? Optim.Options(;
243+
extended_trace = true, callback = _cb,
244+
iterations = maxiters, kwargs...) :
245+
Optim.Options(;extended_trace = true,
246+
callback = _cb, kwargs...))
247+
SciMLBase.build_solution(prob, opt, original.minimizer,
248+
original.minimum; original=original)
281249
end
282250

283251

@@ -345,8 +313,14 @@ function __solve(prob::OptimizationProblem, opt::Optim.ConstrainedOptimizer,
345313
ub = prob.ub === nothing ? [] : prob.ub
346314
optim_fc = TwiceDifferentiableConstraints(cons!, cons_j!, cons_hl!, lb, ub, prob.lcons, prob.ucons)
347315

348-
Optim.optimize(optim_f, optim_fc, prob.u0, opt, !(isnothing(maxiters)) ? Optim.Options(;extended_trace = true, callback = _cb, iterations = maxiters, kwargs...)
349-
: Optim.Options(;extended_trace = true, callback = _cb, kwargs...))
316+
original = Optim.optimize(optim_f, optim_fc, prob.u0, opt,
317+
!(isnothing(maxiters)) ? Optim.Options(;
318+
extended_trace = true, callback = _cb,
319+
iterations = maxiters, kwargs...) :
320+
Optim.Options(;extended_trace = true,
321+
callback = _cb, kwargs...))
322+
SciMLBase.build_solution(prob, opt, original.minimizer,
323+
original.minimum; original=original)
350324
end
351325

352326

@@ -399,17 +373,8 @@ function __init__()
399373

400374
bboptre = !(isnothing(maxiters)) ? BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], MaxSteps = maxiters, CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...) : BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...)
401375

402-
403-
OptimizationSolution(opt.method,
404-
[NaN],# initial_x,
405-
BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
406-
BlackBoxOptim.best_fitness(bboptre), # pick_best_f(f_incr_pick, state, d),
407-
bboptre.iterations, #iteration,
408-
!(isnothing(maxiters)) ? bboptre.iterations >= maxiters : true, #iteration == options.iterations,
409-
true,
410-
bboptre.elapsed_time,
411-
NamedTuple())
412-
376+
SciMLBase.build_solution(prob, opt, BlackBoxOptim.best_candidate(bboptre),
377+
BlackBoxOptim.best_fitness(bboptre); original=bboptre)
413378
end
414379

415380
function __solve(prob::EnsembleOptimizationProblem, opt::BBO, data = DEFAULT_DATA;
@@ -447,7 +412,7 @@ function __init__()
447412
end
448413

449414
_loss = function(θ)
450-
x = ntuple(i->first(prob.prob[i].f(θ, prob.prob[i].p, cur...)),length(prob.prob))
415+
x = ntuple(i->first(prob.prob[i].f(θ, prob.prob[i].p, cur...)),length(prob.prob))
451416
return x
452417
end
453418

@@ -464,17 +429,8 @@ function __init__()
464429

465430
bboptre = !(isnothing(maxiters)) ? BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(multi_bounds.lb[i], multi_bounds.ub[i]) for i in 1:length(multi_bounds.lb)], MaxSteps = maxiters, CallbackFunction = _cb, CallbackInterval = 0.0, FitnessScheme=FitnessScheme, kwargs...) : BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(multi_bounds.lb[i], multi_bounds.ub[i]) for i in 1:length(multi_bounds.lb)], CallbackFunction = _cb, CallbackInterval = 0.0, FitnessScheme=FitnessScheme, kwargs...)
466431

467-
468-
OptimizationSolution(opt.method,
469-
[NaN],# initial_x,
470-
BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
471-
BlackBoxOptim.best_fitness(bboptre), # pick_best_f(f_incr_pick, state, d),
472-
bboptre.iterations, #iteration,
473-
!(isnothing(maxiters)) ? bboptre.iterations >= maxiters : true, #iteration == options.iterations,
474-
true,
475-
bboptre.elapsed_time,
476-
NamedTuple())
477-
432+
SciMLBase.build_solution(prob, opt, BlackBoxOptim.best_candidate(bboptre),
433+
BlackBoxOptim.best_fitness(bboptre); original=bboptre)
478434
end
479435
end
480436

@@ -528,15 +484,7 @@ function __init__()
528484
(minf,minx,ret) = NLopt.optimize(opt, prob.u0)
529485
_time = time()
530486

531-
OptimizationSolution(opt.algorithm,
532-
prob.u0,# initial_x,
533-
minx, #pick_best_x(f_incr_pick, state),
534-
minf, # pick_best_f(f_incr_pick, state, d),
535-
Int(opt.numevals), #iteration,
536-
!(isnothing(maxiters)) ? opt.numevals >= maxiters : true, #iteration == options.iterations,
537-
ret,
538-
_time-t0,
539-
NamedTuple())
487+
SciMLBase.build_solution(prob, opt, minx, minf; original=nothing)
540488
end
541489
end
542490

@@ -570,15 +518,7 @@ function __init__()
570518

571519
t1 = time()
572520

573-
OptimizationSolution(opt,
574-
[NaN],# initial_x,
575-
p.location, #pick_best_x(f_incr_pick, state),
576-
p.value, # pick_best_f(f_incr_pick, state, d),
577-
local_maxiters,
578-
local_maxiters>=opt.maxeval, #not sure if that's correct
579-
true,
580-
t1 - t0,
581-
NamedTuple())
521+
SciMLBase.build_solution(prob, opt, p.location, p.value; original=p)
582522
end
583523
end
584524

@@ -613,15 +553,7 @@ function __init__()
613553
box = minimum(root)
614554
t1 = time()
615555

616-
OptimizationSolution(opt,
617-
[NaN],# initial_x,
618-
QuadDIRECT.position(box, x0), #pick_best_x(f_incr_pick, state),
619-
QuadDIRECT.value(box), # pick_best_f(f_incr_pick, state, d),
620-
!(isnothing(maxiters)) ? maxiters : 0,
621-
box.qnconverged, #not sure if that's correct
622-
true,
623-
t1 - t0,
624-
NamedTuple())
556+
SciMLBase.build_solution(prob, opt, QuadDIRECT.position(box, x0), QuadDIRECT.value(box); original=root)
625557
end
626558
end
627559

@@ -669,15 +601,7 @@ function __init__()
669601
: Evolutionary.Options(;callback = _cb, kwargs...))
670602
t1 = time()
671603

672-
OptimizationSolution(summary(result),
673-
prob.u0, #initial_x
674-
Evolutionary.minimizer(result), #pick_best_x
675-
minimum(result), #pick_best_f
676-
Evolutionary.iterations(result), #iteration
677-
Evolutionary.converged(result), #convergence status
678-
true,
679-
t1 - t0,
680-
NamedTuple())
604+
SciMLBase.build_solution(prob, opt, Evolutionary.minimizer(result), Evolutionary.minimum(result); original=result)
681605
end
682606
end
683607
@require CMAEvolutionStrategy="8d3b24bd-414e-49e0-94fb-163cc3a3e411" begin
@@ -726,15 +650,7 @@ function __init__()
726650
criterion = false
727651
end
728652

729-
OptimizationSolution(opt,
730-
prob.u0,# initial_x,
731-
result.logger.xbest[end], #pick_best_x(f_incr_pick, state),
732-
result.logger.fbest[end], # pick_best_f(f_incr_pick, state, d),
733-
length(result.logger.fbest),
734-
criterion,
735-
true,
736-
result.logger.times[end] - result.logger.times[1],
737-
NamedTuple())
653+
SciMLBase.build_solution(prob, opt, result.logger.xbest[end], result.logger.fbest[end]; original=result)
738654
end
739655
end
740656
end

test/diffeqfluxtests.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ callback = function (p, l, pred)
4040

4141
# using `remake` to re-create our `prob` with current parameters `p`
4242
remade_solution = solve(remake(prob_ode, p = p), Tsit5(), saveat = tsteps)
43-
43+
4444
# Tell sciml_train to not halt the optimization. If return true, then
4545
# optimization stops.
4646
return false
@@ -105,7 +105,7 @@ result_neuralode = GalacticOptim.solve(prob,
105105
ADAM(), cb = callback,
106106
maxiters = 300)
107107

108-
prob2 = remake(prob,u0=result_neuralode.minimizer)
108+
prob2 = remake(prob,u0=result_neuralode.u)
109109
result_neuralode2 = GalacticOptim.solve(prob2,
110110
BFGS(initial_stepnorm=0.0001),
111111
cb = callback,

test/rosenbrock.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ cons_circ = (x,p) -> [x[1]^2 + x[2]^2]
6868
optprob = OptimizationFunction(rosenbrock, GalacticOptim.AutoForwardDiff();cons= cons_circ)
6969
prob = OptimizationProblem(optprob, x0, lcons = [-Inf], ucons = [0.25^2])
7070
sol = solve(prob, IPNewton())
71-
@test sqrt(cons(sol.minimizer,nothing)[1]) ≈ 0.25 rtol = 1e-6
71+
@test sqrt(cons(sol.u,nothing)[1]) ≈ 0.25 rtol = 1e-6
7272

7373
optprob = OptimizationFunction(rosenbrock, GalacticOptim.AutoZygote())
7474
prob = OptimizationProblem(optprob, x0)

0 commit comments

Comments
 (0)