Commit b145d49

Add and update test(s)
1 parent 65b853f commit b145d49

2 files changed: +35 -15 lines changed

lib/OptimizationOptimJL/test/runtests.jl

Lines changed: 1 addition & 1 deletion
@@ -93,7 +93,7 @@ using Test
 sol = solve(prob, BFGS())
 @test 10 * sol.minimum < l1

-function g!(G, x)
+function g!(G, x, p = nothing)
     G[1] = -2.0 * (1.0 - x[1]) - 400.0 * (x[2] - x[1]^2) * x[1]
     G[2] = 200.0 * (x[2] - x[1]^2)
 end
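
The change above aligns the hand-written gradient with the three-argument (G, x, p) signature that Optimization.jl uses for user-supplied derivatives: the problem's parameter object is passed through to the gradient, and `p = nothing` keeps two-argument calls working. A minimal sketch of the pattern (the Rosenbrock definition and starting point here are illustrative, not part of this commit):

using Optimization, OptimizationOptimJL

rosenbrock(x, p) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2  # illustrative objective

# Matches the (G, x, p) interface while staying callable with two arguments.
function g!(G, x, p = nothing)
    G[1] = -2.0 * (1.0 - x[1]) - 400.0 * (x[2] - x[1]^2) * x[1]
    G[2] = 200.0 * (x[2] - x[1]^2)
end

optf = OptimizationFunction(rosenbrock; grad = g!)
prob = OptimizationProblem(optf, zeros(2))
sol = solve(prob, BFGS())  # BFGS consumes g! through the (G, x, p) interface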

test/ADtests.jl

Lines changed: 34 additions & 14 deletions
@@ -95,7 +95,7 @@ optprob.grad(G2, x0)
 optprob.hess(H2, x0)
 @test H1 == H2

-prob = OptimizationProblem(optprob, x0)
+prob = OptimizationProblem(optf, x0)

 sol = solve(prob, Optim.BFGS())
 @test 10 * sol.minimum < l1
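
The fix above, repeated in the hunks that follow, constructs the problem from `optf` (the user-level OptimizationFunction) rather than `optprob`, the result of `Optimization.instantiate_function`, which is the expanded bundle whose `grad`/`hess` fields these tests call directly. A rough sketch of the distinction, with an illustrative objective standing in for the test file's `rosenbrock`:

using Optimization, OptimizationOptimJL, Optim

rosenbrock(x, p) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2  # illustrative
x0 = zeros(2)

# What users build, and what OptimizationProblem expects:
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())

# What instantiate_function returns: derivatives filled in for direct testing.
optprob = Optimization.instantiate_function(optf, x0, Optimization.AutoForwardDiff(), nothing)
G2 = zeros(2)
optprob.grad(G2, x0)  # exercised directly, as in the tests above

# The problem wraps the original function, not the instantiated bundle:
prob = OptimizationProblem(optf, x0)
sol = solve(prob, Optim.BFGS())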
@@ -113,7 +113,7 @@ optprob.grad(G2, x0)
 optprob.hess(H2, x0)
 @test H1 == H2

-prob = OptimizationProblem(optprob, x0)
+prob = OptimizationProblem(optf, x0)

 sol = solve(prob, Optim.BFGS())
 @test 10 * sol.minimum < l1
@@ -132,7 +132,7 @@ optprob.grad(G2, x0)
 optprob.hess(H2, x0)
 @test H1 == H2

-prob = OptimizationProblem(optprob, x0)
+prob = OptimizationProblem(optf, x0)
 sol = solve(prob, Optim.BFGS())
 @test 10 * sol.minimum < l1

@@ -148,7 +148,8 @@ optprob.grad(G2, x0)
 @test G1 == G2
 @test_throws ErrorException optprob.hess(H2, x0)

-prob = OptimizationProblem(optprob, x0)
+
+prob = OptimizationProblem(optf, x0)

 sol = solve(prob, Optim.BFGS())
 @test 10 * sol.minimum < l1
@@ -163,7 +164,7 @@ optprob.grad(G2, x0)
 optprob.hess(H2, x0)
 @test H1 ≈ H2 rtol=1e-6

-prob = OptimizationProblem(optprob, x0)
+prob = OptimizationProblem(optf, x0)
 sol = solve(prob, Optim.BFGS())
 @test 10 * sol.minimum < l1

@@ -248,15 +249,34 @@ H2 = Array{Float64}(undef, 2, 2)
 optprob.hess(H2, [5.0, 3.0])
 @test all(isapprox(H2, [28802.0 -2000.0; -2000.0 200.0]; rtol = 1e-3))

-cons_j = optprob.cons_j
-optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(), cons = con2_c,
-    cons_j = cons_j, cons_jac_prototype = cons_jac_proto)
-optprob = Optimization.instantiate_function(optf, x0, Optimization.AutoForwardDiff(),
-    nothing, 2)
-@test optprob.cons_jac_prototype == sparse([1.0 1.0; 1.0 1.0]) # make sure it's still using it
-J = Array{Float64}(undef, 2, 2)
-optprob.cons_j(J, [5.0, 3.0])
-@test all(isapprox(J, [10.0 6.0; -0.149013 -0.958924]; rtol = 1e-3))
+cons_j = (J, θ, p) -> optprob.cons_j(J, θ)
+hess = (H, θ, p) -> optprob.hess(H, θ)
+sH = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+sJ = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(), hess=hess, hess_prototype=copy(sH), cons=con2_c, cons_j=cons_j, cons_jac_prototype=copy(sJ))
+optprob1 = Optimization.instantiate_function(optf, x0, Optimization.AutoForwardDiff(), nothing, 2)
+@test optprob1.hess_prototype == sparse([0.0 0.0; 0.0 0.0]) # make sure it's still using it
+optprob1.hess(sH, [5.0, 3.0])
+@test all(isapprox(sH, [28802.0 -2000.0; -2000.0 200.0]; rtol=1e-3))
+@test optprob1.cons_jac_prototype == sparse([0.0 0.0; 0.0 0.0]) # make sure it's still using it
+optprob1.cons_j(sJ, [5.0, 3.0])
+@test all(isapprox(sJ, [10.0 6.0; -0.149013 -0.958924]; rtol=1e-3))
+
+grad = (G, θ, p) -> optprob.grad(G, θ)
+hess = (H, θ, p) -> optprob.hess(H, θ)
+cons_j = (J, θ, p) -> optprob.cons_j(J, θ)
+cons_h = (res, θ, p) -> optprob.cons_h(res, θ)
+sH = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+sJ = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+sH3 = [sparse([1, 2], [1, 2], zeros(2)), sparse([1, 1, 2], [1, 2, 1], zeros(3))]
+optf = OptimizationFunction(rosenbrock, SciMLBase.NoAD(), grad=grad, hess=hess, cons=con2_c, cons_j=cons_j, cons_h=cons_h, hess_prototype=sH, cons_jac_prototype=sJ, cons_hess_prototype=sH3)
+optprob2 = Optimization.instantiate_function(optf, x0, SciMLBase.NoAD(), nothing, 2)
+optprob2.hess(sH, [5.0, 3.0])
+@test all(isapprox(sH, [28802.0 -2000.0; -2000.0 200.0]; rtol=1e-3))
+optprob2.cons_j(sJ, [5.0, 3.0])
+@test all(isapprox(sJ, [10.0 6.0; -0.149013 -0.958924]; rtol=1e-3))
+optprob2.cons_h(sH3, [5.0, 3.0])
+@test sH3 ≈ [[2.0 0.0; 0.0 2.0], [2.8767727327346804 0.2836621681849162; 0.2836621681849162 -6.622738308376736e-9]]

 # Can we solve problems? Using AutoForwardDiff to test since we know that works
 for consf in [cons, con2_c]
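
The new tests do two things: they adapt the previously instantiated two-argument derivatives to the three-argument (out, θ, p) convention via closures, and they check that user-supplied sparse prototypes (hess_prototype, cons_jac_prototype, cons_hess_prototype) are preserved by instantiate_function under both AutoForwardDiff and NoAD. A self-contained sketch of the prototype pattern for the Hessian alone (the objective and analytic Hessian are illustrative; their values at [5.0, 3.0] match the 28802/-2000/200 entries tested above):

using Optimization, SparseArrays

rosen(x, p) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2  # illustrative

# Adapt an in-place two-argument Hessian to (H, θ, p), as the closures above do.
function rosen_hess!(H, x)
    H[1, 1] = 2.0 - 400.0 * x[2] + 1200.0 * x[1]^2
    H[1, 2] = H[2, 1] = -400.0 * x[1]
    H[2, 2] = 200.0
end
hess = (H, θ, p) -> rosen_hess!(H, θ)

# A 2x2 pattern stored sparsely; instantiate_function should hand this
# structure back rather than substituting a dense Matrix.
sH = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))

optf = OptimizationFunction(rosen, Optimization.AutoForwardDiff();
    hess = hess, hess_prototype = copy(sH))
optprob = Optimization.instantiate_function(optf, zeros(2),
    Optimization.AutoForwardDiff(), nothing)
optprob.hess(sH, [5.0, 3.0])  # writes the Hessian into the sparse prototype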
