f = instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p)

@withprogress progress name="Training" begin
    for (i, d) in enumerate(data)
        f.grad(G, θ, d...)
        x = f.f(θ, prob.p, d...)
        cb_call = cb(θ, x...)
        if !(typeof(cb_call) <: Bool)
            error("The callback should return a boolean `halt` for whether to stop the optimization process.")
# ...
    end
    cur, state = iterate(data, state)
    cb_call
end

if !(isnothing(maxiters)) && maxiters <= 0.0
    error("The number of maxiters has to be a non-negative and non-zero number.")
elseif !(isnothing(maxiters))
    maxiters = convert(Int, maxiters)
end

f = instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p)

!(opt isa Optim.ZerothOrderOptimizer) && f.grad === nothing && error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
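# Hedged sketch of the user-facing setup the error above asks for: give
# OptimizationFunction an AD backend so that instantiate_function can fill in
# f.grad for gradient-based (non-zeroth-order) optimizers. The problem setup
# below (rosenbrock, optf, prob) is illustrative and assumes the
# GalacticOptim-era API; it is not code from this commit.
using GalacticOptim, Optim
rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
optf = OptimizationFunction(rosenbrock, GalacticOptim.AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])
sol = solve(prob, BFGS(); maxiters = 1000)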
# ...
    cb_call = !(opt isa Optim.SAMIN) && opt.method == NelderMead() ? cb(decompose_trace(trace).metadata["centroid"], x...) : cb(decompose_trace(trace).metadata["x"], x...)
    if !(typeof(cb_call) <: Bool)
        error("The callback should return a boolean `halt` for whether to stop the optimization process.")
    end
    cur, state = iterate(data, state)
    cb_call
end

if !(isnothing(maxiters)) && maxiters <= 0.0
    error("The number of maxiters has to be a non-negative and non-zero number.")
elseif !(isnothing(maxiters))
    maxiters = convert(Int, maxiters)
end

f = instantiate_function(prob.f, prob.u0, prob.f.adtype, prob.p)

!(opt isa Optim.ZerothOrderOptimizer) && f.grad === nothing && error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")
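# Hedged sketch of the zeroth-order path above: with Optim.NelderMead() the
# gradient requirement is skipped and the callback is handed the simplex
# centroid taken from the trace metadata rather than "x". Reuses rosenbrock
# and the imports from the sketch above; the `cb` keyword and the
# always-return-false callback are assumptions based on how cb is used in
# this file, not code from this commit.
prob_nm = OptimizationProblem(OptimizationFunction(rosenbrock), zeros(2), [1.0, 100.0])
sol_nm = solve(prob_nm, NelderMead(); cb = (args...) -> false, maxiters = 200)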