Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Add logarithm-cosine loss #227

Merged
merged 12 commits into from
Jun 16, 2019
20 changes: 20 additions & 0 deletions Sources/TensorFlow/Loss.swift
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,26 @@ public func categoricalHingeLoss<Scalar: TensorFlowFloatingPoint>(
return max(Tensor(0), negative - positive + Tensor(1))
}

// Helper function for `logCoshLoss(predicted:expected:)`.
//
// Computes `log(cosh(x))` elementwise using the numerically stable identity
//   log(cosh(x)) = x + softplus(-2x) - log(2)
// which avoids overflowing `cosh(x)` for inputs of large magnitude.
@differentiable
fileprivate func logCosh<Scalar: TensorFlowFloatingPoint>(
    _ x: Tensor<Scalar>
) -> Tensor<Scalar> {
    x + softplus(Tensor(-2) * x) - log(Tensor(2))
}

/// Returns the logarithm of the hyperbolic cosine of the error between predictions and expectations,
/// i.e. the mean of `log(cosh(predicted - expected))` over all elements.
///
/// - Parameters:
///   - predicted: Predicted outputs from a neural network.
///   - expected: Expected values, i.e. targets, that correspond to the correct output.
@differentiable(wrt: predicted)
public func logCoshLoss<Scalar: TensorFlowFloatingPoint>(
    predicted: Tensor<Scalar>, expected: Tensor<Scalar>
) -> Tensor<Scalar> {
    let error = predicted - expected
    return logCosh(error).mean()
}

/// Returns the Poisson loss between predictions and expectations.
///
/// - Parameters:
Expand Down
11 changes: 10 additions & 1 deletion Tests/TensorFlowTests/LossTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,14 @@ final class LossTests: XCTestCase {
assertElementsEqual(expected: Tensor(expectedLoss), actual: loss)
}

func testLogCoshLoss() {
    let predictions = Tensor<Float>([0.2, 0.3, 0.4])
    let targets = Tensor<Float>([1.0, 4.0, 3.0])
    // Mean of log(cosh(x)) over the errors x = [-0.8, -3.7, -2.6].
    let expectedLoss: Float = 1.7368573
    let actualLoss = logCoshLoss(predicted: predictions, expected: targets)
    assertElementsEqual(expected: Tensor(expectedLoss), actual: actualLoss)
}

func testPoissonLoss() {
let predicted = Tensor<Float>([0.1, 0.2, 0.3])
let expected = Tensor<Float>([1, 2, 3])
Expand Down Expand Up @@ -238,7 +246,8 @@ final class LossTests: XCTestCase {
("testKullbackLeiblerDivergence", testKullbackLeiblerDivergence),
("testCategoricalHingeLoss", testCategoricalHingeLoss),
("testSquaredHingeLoss", testSquaredHingeLoss),
("testPoissonLoss", testPoissonLoss),
("testPoissonLoss", testPoissonLoss),
("testLogCoshLoss", testLogCoshLoss),
("testSoftmaxCrossEntropyWithProbabilitiesLoss",
testSoftmaxCrossEntropyWithProbabilitiesLoss),
("testSoftmaxCrossEntropyWithProbabilitiesGrad",
Expand Down