@@ -275,17 +275,27 @@ end
 
     rng = MersenneTwister(0)
     X = rand(rng, 2, 32)
+    true_params = [0.5 2.0]
+    init_params = [0.1 0.2]
     classes = rand(rng, 1:2, 32)
-    y = @. X[1, :] * X[1, :] - cos(2.6 * X[2, :]) + classes
+    y = [X[1, i] * X[1, i] - cos(2.6 * X[2, i]) + true_params[1, classes[i]] for i in 1:32]
 
-    operators = OperatorEnum(; unary_operators=[cos], binary_operators=[+, *, -])
+    (true_val, true_grad) =
+        value_and_gradient(AutoZygote(), (X, init_params, [2.5])) do (X, params, c)
+            pred = [
+                X[1, i] * X[1, i] - cos(c[1] * X[2, i]) + params[1, classes[i]] for
+                i in 1:32
+            ]
+            sum(abs2, pred .- y)
+        end
 
+    operators = OperatorEnum(; unary_operators=[cos], binary_operators=[+, *, -])
     ex = @parse_expression(
         x * x - cos(2.5 * y) + p1,
         operators = operators,
         expression_type = ParametricExpression,
         variable_names = ["x", "y"],
-        extra_metadata = (parameter_names=["p1"], parameters=[0.5 0.2])
+        extra_metadata = (parameter_names=["p1"], parameters=init_params)
     )
     f = let operators = operators, X = X, classes = classes, y = y
         ex -> sum(abs2, ex(X, classes) .- y)
@@ -299,4 +309,11 @@
         Float64,ParametricNode{Float64},Vector{Float64}
     }
     @test grad.metadata._data.parameters isa Matrix{Float64}
+
+    # Loss value:
+    @test val ≈ true_val
+    # Gradient w.r.t. the constant:
+    @test grad.tree.gradient ≈ true_grad[3]
+    # Gradient w.r.t. the parameters:
+    @test grad.metadata._data.parameters ≈ true_grad[2]
 end
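
For readers unfamiliar with the call introduced above: when the differentiation point passed to DifferentiationInterface's value_and_gradient is a tuple, the returned gradient is itself a tuple with one entry per element, which is why the new assertions index true_grad[2] (the parameters) and true_grad[3] (the tuned constant). Below is a minimal, self-contained sketch of that pattern, assuming the same packages the test relies on (DifferentiationInterface, ADTypes, Zygote) are available; the toy function, values, and sizes are illustrative and not taken from the test file.

    # Minimal sketch (not from the test): differentiate a scalar loss with
    # respect to a tuple of arguments via DifferentiationInterface + Zygote.
    using DifferentiationInterface: value_and_gradient
    using ADTypes: AutoZygote
    import Zygote  # backend implementation behind AutoZygote()

    x = [1.0, 2.0]   # toy "data"
    p = [0.5 2.0]    # toy 1x2 parameter matrix
    c = [2.5]        # constant kept in a vector so it receives a gradient entry

    val, grad = value_and_gradient(AutoZygote(), (x, p, c)) do (x, p, c)
        sum(abs2, x) + sum(p) * c[1]
    end

    # grad mirrors the input tuple:
    #   grad[1] ≈ 2 .* x            (w.r.t. x)
    #   grad[2] ≈ fill(c[1], 1, 2)  (w.r.t. the parameters)
    #   grad[3] ≈ [sum(p)]          (w.r.t. the constant)

In the test itself the same indexing convention applies: the input tuple is (X, init_params, [2.5]), so entry 2 of the reference gradient is compared against the expression's parameter matrix and entry 3 against the gradient of its constant.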