@@ -232,13 +232,34 @@ H2 = Array{Float64}(undef, 2, 2)
 optprob.hess(H2, [5.0, 3.0])
 @test all(isapprox(H2, [28802.0 -2000.0; -2000.0 200.0]; rtol = 1e-3))
 
-cons_j = optprob.cons_j
-optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(), cons = con2_c, cons_j = cons_j, cons_jac_prototype = cons_jac_proto)
-optprob = Optimization.instantiate_function(optf, x0, Optimization.AutoForwardDiff(), nothing, 2)
-@test optprob.cons_jac_prototype == sparse([1.0 1.0; 1.0 1.0]) # make sure it's still using it
-J = Array{Float64}(undef, 2, 2)
-optprob.cons_j(J, [5.0, 3.0])
-@test all(isapprox(J, [10.0 6.0; -0.149013 -0.958924]; rtol = 1e-3))
+cons_j = (J, θ, p) -> optprob.cons_j(J, θ)
+hess = (H, θ, p) -> optprob.hess(H, θ)
+sH = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+sJ = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff(), hess = hess, hess_prototype = copy(sH), cons = con2_c, cons_j = cons_j, cons_jac_prototype = copy(sJ))
+optprob1 = Optimization.instantiate_function(optf, x0, Optimization.AutoForwardDiff(), nothing, 2)
+@test optprob1.hess_prototype == sparse([0.0 0.0; 0.0 0.0]) # make sure it's still using it
+optprob1.hess(sH, [5.0, 3.0])
+@test all(isapprox(sH, [28802.0 -2000.0; -2000.0 200.0]; rtol = 1e-3))
+@test optprob1.cons_jac_prototype == sparse([0.0 0.0; 0.0 0.0]) # make sure it's still using it
+optprob1.cons_j(sJ, [5.0, 3.0])
+@test all(isapprox(sJ, [10.0 6.0; -0.149013 -0.958924]; rtol = 1e-3))
+
+grad = (G, θ, p) -> optprob.grad(G, θ)
+hess = (H, θ, p) -> optprob.hess(H, θ)
+cons_j = (J, θ, p) -> optprob.cons_j(J, θ)
+cons_h = (res, θ, p) -> optprob.cons_h(res, θ)
+sH = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+sJ = sparse([1, 1, 2, 2], [1, 2, 1, 2], zeros(4))
+sH3 = [sparse([1, 2], [1, 2], zeros(2)), sparse([1, 1, 2], [1, 2, 1], zeros(3))]
+optf = OptimizationFunction(rosenbrock, SciMLBase.NoAD(), grad = grad, hess = hess, cons = con2_c, cons_j = cons_j, cons_h = cons_h, hess_prototype = sH, cons_jac_prototype = sJ, cons_hess_prototype = sH3)
+optprob2 = Optimization.instantiate_function(optf, x0, SciMLBase.NoAD(), nothing, 2)
+optprob2.hess(sH, [5.0, 3.0])
+@test all(isapprox(sH, [28802.0 -2000.0; -2000.0 200.0]; rtol = 1e-3))
+optprob2.cons_j(sJ, [5.0, 3.0])
+@test all(isapprox(sJ, [10.0 6.0; -0.149013 -0.958924]; rtol = 1e-3))
+optprob2.cons_h(sH3, [5.0, 3.0])
+@test sH3 ≈ [[2.0 0.0; 0.0 2.0], [2.8767727327346804 0.2836621681849162; 0.2836621681849162 -6.622738308376736e-9]]
 
 # Can we solve problems? Using AutoForwardDiff to test since we know that works
 for consf in [cons, con2_c]