@@ -216,6 +216,33 @@ final class LossTests: XCTestCase {
             [-0.0625, -0.01490036, 0.04759964, 0.0]])
        assertEqual(computedGradient, expectedGradient, accuracy: 1e-6)
    }
+    func testHuberLoss() {
+        let predictions = Tensor<Float>([[0.9, 0.2, 0.2], [0.8, 0.4, 0.6]])
+        let labels = Tensor<Float>([[1, 0, 1], [1, 0, 0]])
+
+        do {
+            // Test adapted from:
+            // https://github.com/tensorflow/tensorflow/blob/148f07323f97ef54998f28cd95c195064ce2c426/tensorflow/python/keras/losses_test.py#L1554
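+            // Since `predicted` and `expected` are identical here, every
+            // residual is zero, so the Huber loss should be exactly zero.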
+            let loss = huberLoss(predicted: predictions, expected: predictions, delta: 1)
+            assertEqual(loss, Tensor(0), accuracy: 1e-6)
+        }
+
+        do {
+            // Test adapted from:
+            // https://github.com/tensorflow/tensorflow/blob/148f07323f97ef54998f28cd95c195064ce2c426/tensorflow/python/keras/losses_test.py#L1560
+            // The expected loss was computed using Python TensorFlow 2.0.0-beta1:
+            // ```
+            // import tensorflow as tf  # 2.0.0-beta1
+            // predictions = tf.constant([[0.9, 0.2, 0.2], [0.8, 0.4, 0.6]])
+            // labels = tf.constant([[1.0, 0.0, 1.0], [1.0, 0.0, 0.0]])
+            // loss = tf.losses.Huber(delta=1.0, reduction=tf.losses.Reduction.SUM)
+            // print(loss(labels, predictions))
+            // # tf.Tensor(0.62500006, shape=(), dtype=float32)
+            // ```
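+            // With delta = 1, every residual falls in the quadratic region
+            // (|predicted - expected| <= delta), so each element contributes
+            // 0.5 * error^2, and the SUM reduction gives
+            // 0.5 * (0.01 + 0.04 + 0.64 + 0.04 + 0.16 + 0.36) = 0.625.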
+            let loss = huberLoss(predicted: predictions, expected: labels, delta: Float(1))
+            assertEqual(loss, Tensor(0.62500006), accuracy: 1e-6)
+        }
+    }

    static var allTests = [
        ("testL1Loss", testL1Loss),
@@ -237,5 +264,6 @@ final class LossTests: XCTestCase {
         testSoftmaxCrossEntropyWithProbabilitiesGrad),
        ("testSigmoidCrossEntropyLoss", testSigmoidCrossEntropyLoss),
        ("testSigmoidCrossEntropyGradient", testSigmoidCrossEntropyGradient),
+        ("testHuberLoss", testHuberLoss)
    ]
}