Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit adae7a9

Browse files
jrabary (Jaonary Rabarisoa)
authored and committed
add huber loss support
1 parent 7e19749 commit adae7a9

File tree

2 files changed

+46
-0
lines changed

2 files changed

+46
-0
lines changed

Sources/TensorFlow/Loss.swift

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -290,3 +290,31 @@ public func sigmoidCrossEntropy<Scalar: TensorFlowFloatingPoint>(
290290
let negAbsLogits = max(logits, -logits) // Custom `abs` to compute gradients at `0`.
291291
return reduction(maxLogitsWithZero - logits * labels + log1p(exp(-negAbsLogits)))
292292
}
293+
294+
/// Returns the Huber loss between predictions and expectations.
///
/// For each element `x` in `error = expected - predicted`:
///   `loss = 0.5 * x^2`                 if `|x| <= delta`
///   `loss = 0.5 * delta^2 + delta * (|x| - delta)`  if `|x| > delta`
///
/// See: https://en.wikipedia.org/wiki/Huber_loss
///
/// - Parameters:
///   - predicted: Predicted outputs from a neural network.
///   - expected: Expected values, i.e. targets, that correspond to the correct output.
///   - delta: A floating point scalar, the point where the Huber loss function changes from a
///     quadratic to linear.
///   - reduction: Reduction to apply on the computed element-wise loss values.
@differentiable(wrt: predicted)
public func huberLoss<Scalar: TensorFlowFloatingPoint>(
    predicted: Tensor<Scalar>,
    expected: Tensor<Scalar>,
    delta: Scalar,
    reduction: @differentiable (Tensor<Scalar>) -> Tensor<Scalar> = { $0.sum() }
) -> Tensor<Scalar> {
    let error = expected - predicted
    let absError = abs(error)
    // Branch-free split of |x| into a quadratic part (capped at delta) and the
    // linear remainder, so both regimes of the piecewise loss are covered:
    //   |x| <= delta: quadratic = |x|, linear = 0  ->  0.5 * x^2
    //   |x| >  delta: quadratic = delta, linear = |x| - delta
    //                 ->  0.5 * delta^2 + delta * (|x| - delta)
    let quadratic = min(absError, delta)
    let linear = absError - quadratic
    return reduction((0.5 * quadratic * quadratic) + (delta * linear))
}

Tests/TensorFlowTests/LossTests.swift

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -217,6 +217,23 @@ final class LossTests: XCTestCase {
217217
assertEqual(computedGradient, expectedGradient, accuracy: 1e-6)
218218
}
219219

220+
func testHuberLoss() {
    let predictions = Tensor<Float>([[0.9, 0.2, 0.2],
                                     [0.8, 0.4, 0.6]])
    let targets = Tensor<Float>([[1, 0, 1],
                                 [1, 0, 0]])

    // The loss of a tensor against itself must vanish: huber(x, x) = 0.
    let selfLoss = huberLoss(predicted: predictions, expected: predictions, delta: Float(1))
    XCTAssertTrue(
        selfLoss.isAlmostEqual(to: Tensor(0), tolerance: 0.001),
        "Huber Loss all correct")

    // Reference value 0.62500006 computed with tf.keras.losses.Huber.
    let mismatchLoss = huberLoss(predicted: predictions, expected: targets, delta: Float(1))
    XCTAssertTrue(
        mismatchLoss.isAlmostEqual(to: Tensor(0.62500006), tolerance: 0.001),
        "Huber Loss unweighted")
}
236+
220237
static var allTests = [
221238
("testL1Loss", testL1Loss),
222239
("testL2Loss", testL2Loss),
@@ -237,5 +254,6 @@ final class LossTests: XCTestCase {
237254
testSoftmaxCrossEntropyWithProbabilitiesGrad),
238255
("testSigmoidCrossEntropyLoss", testSigmoidCrossEntropyLoss),
239256
("testSigmoidCrossEntropyGradient", testSigmoidCrossEntropyGradient),
257+
("testHuberLoss", testHuberLoss)
240258
]
241259
}

0 commit comments

Comments (0)