
Commit 2736e2d

Merge pull request #708 from ArnoStrouwen/format
reapply formatter
2 parents b389bc0 + a65ef30 commit 2736e2d

38 files changed: +200 additions, −167 deletions

.JuliaFormatter.toml

Lines changed: 2 additions & 1 deletion
@@ -1,2 +1,3 @@
 style = "sciml"
-format_markdown = true
+format_markdown = true
+format_docstrings = true
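
The two added options extend the SciML style to Markdown files and docstrings. As a minimal sketch (not part of the commit), a configuration like this is picked up automatically when the repository is reformatted with JuliaFormatter:

```julia
# Run from the repository root; format(".") discovers .JuliaFormatter.toml and
# applies style = "sciml" together with format_markdown and format_docstrings.
# Assumes JuliaFormatter is installed in the active environment.
using JuliaFormatter

format(".")
```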

docs/pages.jl

Lines changed: 5 additions & 5 deletions
@@ -4,18 +4,18 @@ pages = ["index.md",
 "tutorials/minibatch.md",
 "tutorials/symbolic.md",
 "tutorials/constraints.md",
-"tutorials/linearandinteger.md",
+"tutorials/linearandinteger.md"
 ],
 "Examples" => [
-"examples/rosenbrock.md",
+"examples/rosenbrock.md"
 ],
 "Basics" => [
 "API/optimization_problem.md",
 "API/optimization_function.md",
 "API/solve.md",
 "API/optimization_solution.md",
 "API/modelingtoolkit.md",
-"API/FAQ.md",
+"API/FAQ.md"
 ],
 "Optimizer Packages" => [
 "BlackBoxOptim.jl" => "optimization_packages/blackboxoptim.md",
@@ -33,6 +33,6 @@ pages = ["index.md",
 "PRIMA.jl" => "optimization_packages/prima.md",
 "Polyalgorithms.jl" => "optimization_packages/polyopt.md",
 "QuadDIRECT.jl" => "optimization_packages/quaddirect.md",
-"SpeedMapping.jl" => "optimization_packages/speedmapping.md",
-],
+"SpeedMapping.jl" => "optimization_packages/speedmapping.md"
+]
 ]

docs/src/optimization_packages/optim.md

Lines changed: 1 addition & 0 deletions
@@ -58,6 +58,7 @@ For a more extensive documentation of all the algorithms and options, please con
 - [`Optim.IPNewton()`](https://julianlsolvers.github.io/Optim.jl/stable/#algo/ipnewton/)

 + `μ0` specifies the initial barrier penalty coefficient as either a number or `:auto`
+
 + `show_linesearch` is an option to turn on linesearch verbosity.
 + Defaults:
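
The `μ0` and `show_linesearch` entries shown above are keyword arguments of `Optim.IPNewton()`. A minimal sketch of passing them through Optimization.jl, with an illustrative toy problem that is not taken from the docs page:

```julia
using Optimization, OptimizationOptimJL

# Toy constrained Rosenbrock problem, purely for illustration.
rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
cons(res, x, p) = (res .= [x[1]^2 + x[2]^2])

optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(); cons = cons)
prob = OptimizationProblem(optf, [0.25, 0.25], [1.0, 100.0];
    lcons = [-Inf], ucons = [0.5])

# μ0 sets the initial barrier penalty (a number or :auto);
# show_linesearch turns on linesearch verbosity.
sol = solve(prob, IPNewton(μ0 = :auto, show_linesearch = true))
```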

docs/src/tutorials/linearandinteger.md

Lines changed: 5 additions & 5 deletions
@@ -44,11 +44,11 @@ using Optimization, OptimizationMOI, ModelingToolkit, HiGHS, LinearAlgebra
 @variables m [bounds = (0.0, Inf)]

 cons = [u[1] + v[1] - w[1] ~ 150 # January
-u[2] + v[2] - w[2] - 1.01u[1] + 1.003w[1] ~ 100 # February
-u[3] + v[3] - w[3] - 1.01u[2] + 1.003w[2] ~ -200 # March
-u[4] - w[4] - 1.02v[1] - 1.01u[3] + 1.003w[3] ~ 200 # April
-u[5] - w[5] - 1.02v[2] - 1.01u[4] + 1.003w[4] ~ -50 # May
--m - 1.02v[3] - 1.01u[5] + 1.003w[5] ~ -300]
+u[2] + v[2] - w[2] - 1.01u[1] + 1.003w[1] ~ 100 # February
+u[3] + v[3] - w[3] - 1.01u[2] + 1.003w[2] ~ -200 # March
+u[4] - w[4] - 1.02v[1] - 1.01u[3] + 1.003w[3] ~ 200 # April
+u[5] - w[5] - 1.02v[2] - 1.01u[4] + 1.003w[4] ~ -50 # May
+-m - 1.02v[3] - 1.01u[5] + 1.003w[5] ~ -300]

 @named optsys = OptimizationSystem(m, [u..., v..., w..., m], [], constraints = cons)
 optprob = OptimizationProblem(optsys,

docs/src/tutorials/minibatch.md

Lines changed: 2 additions & 1 deletion
@@ -65,7 +65,8 @@ train_loader = Flux.Data.DataLoader((ode_data, t), batchsize = k)
 numEpochs = 300
 l1 = loss_adjoint(pp, train_loader.data[1], train_loader.data[2])[1]

-optfun = OptimizationFunction((θ, p, batch, time_batch) -> loss_adjoint(θ, batch,
+optfun = OptimizationFunction(
+(θ, p, batch, time_batch) -> loss_adjoint(θ, batch,
 time_batch),
 Optimization.AutoZygote())
 optprob = OptimizationProblem(optfun, pp)

docs/src/tutorials/symbolic.md

Lines changed: 2 additions & 2 deletions
@@ -34,9 +34,9 @@ our parameter values are and the initial conditions. This looks like:

 ```@example modelingtoolkit
 u0 = [x => 1.0
-y => 2.0]
+y => 2.0]
 p = [a => 6.0
-b => 7.0]
+b => 7.0]
 ```

 And now we solve.
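
For context, the `u0` and `p` pair-vectors above feed directly into an `OptimizationProblem` built from the `OptimizationSystem`. A sketch of the surrounding tutorial flow, reconstructed from the Optimization.jl documentation (names and keyword choices may differ slightly from the tutorial source):

```julia
using ModelingToolkit, Optimization, OptimizationOptimJL

@variables x y
@parameters a b

loss = (a - x)^2 + b * (y - x^2)^2
@named sys = OptimizationSystem(loss, [x, y], [a, b])
sys = complete(sys)  # required by newer ModelingToolkit releases

u0 = [x => 1.0
      y => 2.0]
p = [a => 6.0
     b => 7.0]

# grad/hess ask ModelingToolkit to generate symbolic derivatives.
prob = OptimizationProblem(sys, u0, p; grad = true, hess = true)
sol = solve(prob, GradientDescent())
```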

ext/OptimizationFiniteDiffExt.jl

Lines changed: 4 additions & 2 deletions
@@ -15,7 +15,8 @@ function Optimization.instantiate_function(f, x, adtype::AutoFiniteDiff, p,

 if f.grad === nothing
 gradcache = FD.GradientCache(x, x, adtype.fdtype)
-grad = (res, θ, args...) -> FD.finite_difference_gradient!(res, x -> _f(x, args...),
+grad = (res, θ, args...) -> FD.finite_difference_gradient!(
+res, x -> _f(x, args...),
 θ, gradcache)
 else
 grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
@@ -123,7 +124,8 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,

 if f.grad === nothing
 gradcache = FD.GradientCache(cache.u0, cache.u0, adtype.fdtype)
-grad = (res, θ, args...) -> FD.finite_difference_gradient!(res, x -> _f(x, args...),
+grad = (res, θ, args...) -> FD.finite_difference_gradient!(
+res, x -> _f(x, args...),
 θ, gradcache)
 else
 grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)
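
For orientation, these extension methods are what run when a user picks the finite-difference backend; the other AD extensions touched in this commit (ForwardDiff, ReverseDiff, Tracker, and the sparse variants) are reached the same way, just with a different `AutoX()` choice. A minimal sketch of the user-facing call path, with an illustrative objective:

```julia
using Optimization, OptimizationOptimJL

# Selecting AutoFiniteDiff() makes Optimization.instantiate_function build the
# FiniteDiff-based gradient closure shown in the hunks above.
f(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
optf = OptimizationFunction(f, Optimization.AutoFiniteDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])
sol = solve(prob, BFGS())
```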

ext/OptimizationForwardDiffExt.jl

Lines changed: 2 additions & 2 deletions
@@ -65,7 +65,7 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
 if cons !== nothing && f.cons_h === nothing
 fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
 hess_config_cache = [ForwardDiff.HessianConfig(fncs[i], x,
-ForwardDiff.Chunk{chunksize}())
+ForwardDiff.Chunk{chunksize}())
 for i in 1:num_cons]
 cons_h = function (res, θ)
 for i in 1:num_cons
@@ -143,7 +143,7 @@ function Optimization.instantiate_function(f::OptimizationFunction{true},
 if cons !== nothing && f.cons_h === nothing
 fncs = [(x) -> cons_oop(x)[i] for i in 1:num_cons]
 hess_config_cache = [ForwardDiff.HessianConfig(fncs[i], cache.u0,
-ForwardDiff.Chunk{chunksize}())
+ForwardDiff.Chunk{chunksize}())
 for i in 1:num_cons]
 cons_h = function (res, θ)
 for i in 1:num_cons

ext/OptimizationMTKExt.jl

Lines changed: 2 additions & 1 deletion
@@ -56,7 +56,8 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 adtype::AutoModelingToolkit, num_cons = 0)
 p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p

-sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0, cache.p;
+sys = complete(ModelingToolkit.modelingtoolkitize(OptimizationProblem(
+f, cache.u0, cache.p;
 lcons = fill(0.0,
 num_cons),
 ucons = fill(0.0,

ext/OptimizationReverseDiffExt.jl

Lines changed: 8 additions & 8 deletions
@@ -48,7 +48,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
 xdual = ForwardDiff.Dual{
 typeof(T),
 eltype(x),
-chunksize,
+chunksize
 }.(x, Ref(ForwardDiff.Partials((ones(eltype(x), chunksize)...,))))
 h_tape = ReverseDiff.GradientTape(_f, xdual)
 htape = ReverseDiff.compile(h_tape)
@@ -118,9 +118,9 @@ function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
 end
 gs = [x -> grad_cons(x, conshtapes[i]) for i in 1:num_cons]
 jaccfgs = [ForwardDiff.JacobianConfig(gs[i],
-x,
-ForwardDiff.Chunk{chunksize}(),
-T) for i in 1:num_cons]
+x,
+ForwardDiff.Chunk{chunksize}(),
+T) for i in 1:num_cons]
 cons_h = function (res, θ)
 for i in 1:num_cons
 ForwardDiff.jacobian!(res[i], gs[i], θ, jaccfgs[i], Val{false}())
@@ -180,7 +180,7 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 xdual = ForwardDiff.Dual{
 typeof(T),
 eltype(cache.u0),
-chunksize,
+chunksize
 }.(cache.u0, Ref(ForwardDiff.Partials((ones(eltype(cache.u0), chunksize)...,))))
 h_tape = ReverseDiff.GradientTape(_f, xdual)
 htape = ReverseDiff.compile(h_tape)
@@ -253,9 +253,9 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 end
 gs = [x -> grad_cons(x, conshtapes[i]) for i in 1:num_cons]
 jaccfgs = [ForwardDiff.JacobianConfig(gs[i],
-cache.u0,
-ForwardDiff.Chunk{chunksize}(),
-T) for i in 1:num_cons]
+cache.u0,
+ForwardDiff.Chunk{chunksize}(),
+T) for i in 1:num_cons]
 cons_h = function (res, θ)
 for i in 1:num_cons
 ForwardDiff.jacobian!(res[i], gs[i], θ, jaccfgs[i], Val{false}())

ext/OptimizationSparseDiffExt.jl

Lines changed: 39 additions & 35 deletions
@@ -3,13 +3,13 @@ module OptimizationSparseDiffExt
 import Optimization, Optimization.ArrayInterface
 import Optimization.SciMLBase: OptimizationFunction
 import Optimization.ADTypes: AutoSparseForwardDiff,
-AutoSparseFiniteDiff, AutoSparseReverseDiff
+AutoSparseFiniteDiff, AutoSparseReverseDiff
 using Optimization.LinearAlgebra, ReverseDiff
 isdefined(Base, :get_extension) ?
 (using SparseDiffTools,
-SparseDiffTools.ForwardDiff, SparseDiffTools.FiniteDiff, Symbolics) :
+SparseDiffTools.ForwardDiff, SparseDiffTools.FiniteDiff, Symbolics) :
 (using ..SparseDiffTools,
-..SparseDiffTools.ForwardDiff, ..SparseDiffTools.FiniteDiff, ..Symbolics)
+..SparseDiffTools.ForwardDiff, ..SparseDiffTools.FiniteDiff, ..Symbolics)

 function default_chunk_size(len)
 if len < ForwardDiff.DEFAULT_CHUNK_THRESHOLD
@@ -98,8 +98,8 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
 end

 fcons = [(x) -> (_res = zeros(eltype(x), num_cons);
-cons(_res, x);
-_res[i]) for i in 1:num_cons]
+cons(_res, x);
+_res[i]) for i in 1:num_cons]
 cons_hess_caches = gen_conshess_cache.(fcons, Ref(x))
 cons_h = function (res, θ)
 for i in 1:num_cons
@@ -205,8 +205,8 @@ function Optimization.instantiate_function(f::OptimizationFunction{true},
 end

 fcons = [(x) -> (_res = zeros(eltype(x), num_cons);
-cons(_res, x);
-_res[i]) for i in 1:num_cons]
+cons(_res, x);
+_res[i]) for i in 1:num_cons]
 cons_hess_caches = gen_conshess_cache.(fcons, Ref(cache.u0))
 cons_h = function (res, θ)
 for i in 1:num_cons
@@ -246,7 +246,8 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseFiniteDiff, p

 if f.grad === nothing
 gradcache = FD.GradientCache(x, x)
-grad = (res, θ, args...) -> FD.finite_difference_gradient!(res, x -> _f(x, args...),
+grad = (res, θ, args...) -> FD.finite_difference_gradient!(
+res, x -> _f(x, args...),
 θ, gradcache)
 else
 grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
@@ -314,8 +315,8 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseFiniteDiff, p
 end

 fcons = [(x) -> (_res = zeros(eltype(x), num_cons);
-cons(_res, x);
-_res[i]) for i in 1:num_cons]
+cons(_res, x);
+_res[i]) for i in 1:num_cons]
 conshess_caches = gen_conshess_cache.(fcons, Ref(x))
 cons_h = function (res, θ)
 for i in 1:num_cons
@@ -370,7 +371,8 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,

 if f.grad === nothing
 gradcache = FD.GradientCache(cache.u0, cache.u0)
-grad = (res, θ, args...) -> FD.finite_difference_gradient!(res, x -> _f(x, args...),
+grad = (res, θ, args...) -> FD.finite_difference_gradient!(
+res, x -> _f(x, args...),
 θ, gradcache)
 else
 grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)
@@ -439,8 +441,8 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 end

 fcons = [(x) -> (_res = zeros(eltype(x), num_cons);
-cons(_res, x);
-_res[i]) for i in 1:num_cons]
+cons(_res, x);
+_res[i]) for i in 1:num_cons]
 conshess_caches = [gen_conshess_cache(fcons[i], cache.u0) for i in 1:num_cons]
 cons_h = function (res, θ)
 for i in 1:num_cons
@@ -527,7 +529,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseReverseDiff,
 xdual = ForwardDiff.Dual{
 typeof(T),
 eltype(x),
-min(chunksize, maximum(hess_colors)),
+min(chunksize, maximum(hess_colors))
 }.(x,
 Ref(ForwardDiff.Partials((ones(eltype(x),
 min(chunksize, maximum(hess_colors)))...,))))
@@ -611,23 +613,24 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseReverseDiff,
 if adtype.compile
 T = ForwardDiff.Tag(OptimizationSparseReverseTag(), eltype(x))
 xduals = [ForwardDiff.Dual{
-typeof(T),
-eltype(x),
-min(chunksize, maximum(conshess_colors[i])),
-}.(x,
-Ref(ForwardDiff.Partials((ones(eltype(x),
-min(chunksize, maximum(conshess_colors[i])))...,)))) for i in 1:num_cons]
+typeof(T),
+eltype(x),
+min(chunksize, maximum(conshess_colors[i]))
+}.(x,
+Ref(ForwardDiff.Partials((ones(eltype(x),
+min(chunksize, maximum(conshess_colors[i])))...,))))
+for i in 1:num_cons]
 consh_tapes = [ReverseDiff.GradientTape(fncs[i], xduals[i]) for i in 1:num_cons]
 conshtapes = ReverseDiff.compile.(consh_tapes)
 function grad_cons(res1, θ, htape)
 ReverseDiff.gradient!(res1, htape, θ)
 end
 gs = [(res1, x) -> grad_cons(res1, x, conshtapes[i]) for i in 1:num_cons]
 jaccfgs = [ForwardColorJacCache(gs[i],
-x;
-tag = typeof(T),
-colorvec = conshess_colors[i],
-sparsity = conshess_sparsity[i]) for i in 1:num_cons]
+x;
+tag = typeof(T),
+colorvec = conshess_colors[i],
+sparsity = conshess_sparsity[i]) for i in 1:num_cons]
 cons_h = function (res, θ, args...)
 for i in 1:num_cons
 SparseDiffTools.forwarddiff_color_jacobian!(res[i],
@@ -701,7 +704,7 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 xdual = ForwardDiff.Dual{
 typeof(T),
 eltype(cache.u0),
-min(chunksize, maximum(hess_colors)),
+min(chunksize, maximum(hess_colors))
 }.(cache.u0,
 Ref(ForwardDiff.Partials((ones(eltype(cache.u0),
 min(chunksize, maximum(hess_colors)))...,))))
@@ -802,12 +805,13 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 if adtype.compile
 T = ForwardDiff.Tag(OptimizationSparseReverseTag(), eltype(cache.u0))
 xduals = [ForwardDiff.Dual{
-typeof(T),
-eltype(cache.u0),
-min(chunksize, maximum(conshess_colors[i])),
-}.(cache.u0,
-Ref(ForwardDiff.Partials((ones(eltype(cache.u0),
-min(chunksize, maximum(conshess_colors[i])))...,)))) for i in 1:num_cons]
+typeof(T),
+eltype(cache.u0),
+min(chunksize, maximum(conshess_colors[i]))
+}.(cache.u0,
+Ref(ForwardDiff.Partials((ones(eltype(cache.u0),
+min(chunksize, maximum(conshess_colors[i])))...,))))
+for i in 1:num_cons]
 consh_tapes = [ReverseDiff.GradientTape(fncs[i], xduals[i]) for i in 1:num_cons]
 conshtapes = ReverseDiff.compile.(consh_tapes)
 function grad_cons(res1, θ, htape)
@@ -821,10 +825,10 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 end
 end
 jaccfgs = [ForwardColorJacCache(gs[i],
-cache.u0;
-tag = typeof(T),
-colorvec = conshess_colors[i],
-sparsity = conshess_sparsity[i]) for i in 1:num_cons]
+cache.u0;
+tag = typeof(T),
+colorvec = conshess_colors[i],
+sparsity = conshess_sparsity[i]) for i in 1:num_cons]
 cons_h = function (res, θ)
 for i in 1:num_cons
 SparseDiffTools.forwarddiff_color_jacobian!(res[i],

ext/OptimizationTrackerExt.jl

Lines changed: 4 additions & 2 deletions
@@ -11,7 +11,8 @@ function Optimization.instantiate_function(f, x, adtype::AutoTracker, p,
 _f = (θ, args...) -> first(f.f(θ, p, args...))

 if f.grad === nothing
-grad = (res, θ, args...) -> res .= Tracker.data(Tracker.gradient(x -> _f(x, args...),
+grad = (res, θ, args...) -> res .= Tracker.data(Tracker.gradient(
+x -> _f(x, args...),
 θ)[1])
 else
 grad = (G, θ, args...) -> f.grad(G, θ, p, args...)
@@ -42,7+43,8 @@ function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
 _f = (θ, args...) -> first(f.f(θ, cache.p, args...))

 if f.grad === nothing
-grad = (res, θ, args...) -> res .= Tracker.data(Tracker.gradient(x -> _f(x, args...),
+grad = (res, θ, args...) -> res .= Tracker.data(Tracker.gradient(
+x -> _f(x, args...),
 θ)[1])
 else
 grad = (G, θ, args...) -> f.grad(G, θ, cache.p, args...)

lib/OptimizationBBO/src/OptimizationBBO.jl

Lines changed: 2 additions & 2 deletions
@@ -88,7 +88,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
 O,
 D,
 P,
-C,
+C
 }) where {
 F,
 RC,
@@ -101,7 +101,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
 BBO,
 D,
 P,
-C,
+C
 }
 local x, cur, state
