Commit 0b75283

Merge pull request #358 from ArnoStrouwen/fixtuts
start fixing tutorials for strict docs
2 parents: 83d8a19 + 6ee3635

4 files changed: +18 -13 lines


docs/Project.toml

Lines changed: 3 additions & 0 deletions
@@ -2,6 +2,7 @@
 AmplNLWriter = "7c4d4715-977e-5154-bfe0-e096adeac482"
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
 FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
 Ipopt_jll = "9cc047cb-c261-5740-88fc-0cf96f7bdcc7"
@@ -15,7 +16,9 @@ OptimizationMOI = "fd9f6733-72f4-499f-8506-86b2bdd0dea1"
 OptimizationNLopt = "4e6fcdb7-1186-4e1f-a706-475e75c168bb"
 OptimizationOptimJL = "36348300-93cb-4f02-beb5-3c3902f8871e"
 OptimizationOptimisers = "42dfb2eb-d2b4-4451-abcd-913932933ac1"
+OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
+SciMLSensitivity = "1ed8b502-d754-442c-8d5d-10ac956f44a1"
 Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
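The three added entries bring Flux, OrdinaryDiffEq, and SciMLSensitivity into the docs environment; SciMLSensitivity is the renamed successor of DiffEqSensitivity, which the minibatch tutorial below stops importing. As a rough sketch of how such entries are usually produced (assumption: run from the repository root, letting Pkg fill in the UUIDs shown above rather than editing the TOML by hand):

```julia
# Illustrative sketch, not part of the commit: add the new docs dependencies via Pkg.
using Pkg
Pkg.activate("docs")                                    # the docs/ environment of the repo
Pkg.add(["Flux", "OrdinaryDiffEq", "SciMLSensitivity"]) # Pkg records the UUIDs automatically
```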

docs/src/tutorials/minibatch.md

Lines changed: 2 additions & 3 deletions
@@ -6,13 +6,13 @@
 [Optimisers.jl page](@ref optimisers) for details on the installation and usage.
 
 ```@example
-using Flux, Optimization, OptimizationOptimisers, OrdinaryDiffEq, DiffEqSensitivity
+using Flux, Optimization, OptimizationOptimisers, OrdinaryDiffEq, SciMLSensitivity
 
 function newtons_cooling(du, u, p, t)
     temp = u[1]
     k, temp_m = p
     du[1] = dT = -k*(temp-temp_m)
-end
+end
 
 function true_sol(du, u, p, t)
     true_p = [log(2)/8.0, 100.0]
@@ -67,5 +67,4 @@ optfun = OptimizationFunction((θ, p, batch, time_batch) -> loss_adjoint(θ, bat
 optprob = OptimizationProblem(optfun, pp)
 using IterTools: ncycle
 res1 = Optimization.solve(optprob, Optimisers.ADAM(0.05), ncycle(train_loader, numEpochs), callback = callback)
-@test 10res1.minimum < l1
 ```
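The substantive changes here are the import swap to SciMLSensitivity and dropping a stray `@test` that has no place in a doc build. For readers skimming the diff, a small self-contained sketch of the minibatching pattern the tutorial relies on: `Optimization.solve` takes an iterator of batches as an extra positional argument and splats each batch into the objective after `θ` and `p`. Everything below (the toy data, `loss`, batch size, epoch count) is an illustrative stand-in for the tutorial's Newton's-cooling example, and the optimizer call mirrors the tutorial's `Optimisers.ADAM` usage at the package versions around this commit.

```julia
using Optimization, OptimizationOptimisers, ForwardDiff
using IterTools: ncycle

# Toy data: fit y ≈ w*x + c from minibatches of 20 points.
xs = collect(range(0, 1, length = 100))
ys = 2 .* xs .+ 0.1 .* randn(100)
batches = [(xs[i:(i + 19)], ys[i:(i + 19)]) for i in 1:20:81]  # stand-in for Flux.DataLoader

# Each batch tuple is splatted into the loss after θ and p.
loss(θ, p, x, y) = sum(abs2, θ[1] .* x .+ θ[2] .- y)

optfun = OptimizationFunction(loss, Optimization.AutoForwardDiff())
optprob = OptimizationProblem(optfun, zeros(2))
# ncycle repeats the batch iterator for several epochs, as the tutorial does with its DataLoader.
res = Optimization.solve(optprob, Optimisers.ADAM(0.05), ncycle(batches, 10))
```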

docs/src/tutorials/rosenbrock.md

Lines changed: 12 additions & 9 deletions
@@ -53,10 +53,8 @@ sol = solve(prob, Optim.KrylovTrustRegion())
 
 # Now derivative-based optimizers with various constraints
 
-cons = (res,x,p) -> res .= [x[1]^2 + x[2]^2]
-optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();cons= cons)
-#prob = OptimizationProblem(optf, x0, _p)
-#sol = solve(prob, IPNewton()) # No lcons or rcons, so constraints not satisfied
+cons = (res,x,p) -> res .= [x[1]^2 + x[2]^2]
+optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();cons= cons)
 
 prob = OptimizationProblem(optf, x0, _p, lcons = [-Inf], ucons = [Inf])
 sol = solve(prob, IPNewton()) # Note that -Inf < x[1]^2 + x[2]^2 < Inf is always true
@@ -73,6 +71,14 @@ prob = OptimizationProblem(optf, x0, _p, lcons = [0.5], ucons = [0.5],
 sol = solve(prob, IPNewton()) # Notice now that x[1]^2 + x[2]^2 ≈ 0.5:
 # cons(sol.minimizer, _p) = 0.49999999999999994
 
+function con_c(res,x,p)
+    res .= [x[1]^2 + x[2]^2]
+end
+
+optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();cons= con_c)
+prob = OptimizationProblem(optf, x0, _p, lcons = [-Inf], ucons = [0.25^2])
+sol = solve(prob, IPNewton()) # -Inf < cons_circ(sol.minimizer, _p) = 0.25^2
+
 function con2_c(res,x,p)
     res .= [x[1]^2 + x[2]^2, x[2]*sin(x[1])-x[1]]
 end
@@ -81,10 +87,7 @@ optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();cons= con
 prob = OptimizationProblem(optf, x0, _p, lcons = [-Inf,-Inf], ucons = [Inf,Inf])
 sol = solve(prob, IPNewton())
 
-cons_circ = (x,p) -> res .= [x[1]^2 + x[2]^2]
-optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff();cons= cons_circ)
-prob = OptimizationProblem(optf, x0, _p, lcons = [-Inf], ucons = [0.25^2])
-sol = solve(prob, IPNewton()) # -Inf < cons_circ(sol.minimizer, _p) = 0.25^2
+
 
 # Now let's switch over to OptimizationOptimisers with reverse-mode AD
 
@@ -107,7 +110,7 @@ prob = OptimizationProblem(optf, x0, _p)
 sol = solve(prob, Opt(:LN_BOBYQA, 2))
 sol = solve(prob, Opt(:LD_LBFGS, 2))
 
-## Add some box constarints and solve with a few NLopt.jl methods
+## Add some box constraints and solve with a few NLopt.jl methods
 
 prob = OptimizationProblem(optf, x0, _p, lb=[-1.0, -1.0], ub=[0.8, 0.8])
 sol = solve(prob, Opt(:LD_LBFGS, 2))
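The net effect of these hunks: the commented-out unsatisfiable-constraint lines are dropped, the circle-constraint example is rewritten as the in-place `con_c` and moved ahead of the two-constraint `con2_c` example (replacing the old `cons_circ` closure, which referenced an undefined `res`), and a comment typo is fixed. A self-contained sketch of the circle-constraint pattern, restating the Rosenbrock setup the tutorial defines earlier (the `rosenbrock`, `x0`, and `_p` definitions below are assumptions restated for completeness, not part of this diff):

```julia
using Optimization, OptimizationOptimJL, ForwardDiff

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
x0 = zeros(2)
_p = [1.0, 100.0]

# In-place constraint: res receives the value of x[1]^2 + x[2]^2.
function con_c(res, x, p)
    res .= [x[1]^2 + x[2]^2]
end

optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(); cons = con_c)
# Keep the solution inside a circle of radius 0.25: -Inf ≤ x[1]^2 + x[2]^2 ≤ 0.25^2.
prob = OptimizationProblem(optf, x0, _p, lcons = [-Inf], ucons = [0.25^2])
sol = solve(prob, IPNewton())
```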

docs/src/tutorials/symbolic.md

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ for the loss function:
 
 ```@example modelingtoolkit
 loss = (a - x)^2 + b * (y - x^2)^2
-sys = OptimizationSystem(loss,[x,y],[a,b])
+@named sys = OptimizationSystem(loss,[x,y],[a,b])
 ```
 
 In order to turn it into a problem for numerical solutions, we need to specify what
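`@named` supplies the system name that ModelingToolkit requires when constructing an `OptimizationSystem`; without a name the constructor raises an error, which a strict Documenter build turns into a failure. For context, a rough end-to-end sketch of the surrounding workflow, assuming variable and parameter declarations like the tutorial's and a ModelingToolkit version contemporary with this commit (the initial values and solver choice here are illustrative):

```julia
using ModelingToolkit, Optimization, OptimizationOptimJL

@variables x y
@parameters a b

loss = (a - x)^2 + b * (y - x^2)^2
@named sys = OptimizationSystem(loss, [x, y], [a, b])   # @named passes name = :sys

# Concrete values for the unknowns and parameters turn the symbolic system into
# a numerical OptimizationProblem with generated gradient and Hessian code.
u0 = [x => 1.0, y => 2.0]
p = [a => 1.0, b => 100.0]
prob = OptimizationProblem(sys, u0, p; grad = true, hess = true)
sol = solve(prob, Newton())
```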
