Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Add hinge loss function #185

Merged
Merged 3 commits on Jun 8, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions Sources/TensorFlow/Loss.swift
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,18 @@ public func meanAbsoluteError<Scalar: TensorFlowFloatingPoint>(
return abs(expected - predicted).mean()
}

/// Returns the hinge loss between predictions and expectations.
///
/// The hinge loss is `mean(max(0, 1 - expected * predicted))`, computed
/// elementwise over the two tensors. By convention the labels in `expected`
/// are assumed to be -1 or 1 — TODO confirm against callers.
///
/// - Parameters:
///   - predicted: Predicted outputs from a neural network.
///   - expected: Expected values, i.e. targets, that correspond to the correct output.
@differentiable(wrt: predicted)
public func hingeLoss<Scalar: TensorFlowFloatingPoint>(
    predicted: Tensor<Scalar>, expected: Tensor<Scalar>
) -> Tensor<Scalar> {
    // Margin term: 1 - y * y_hat; values below zero contribute no loss.
    let margins = Tensor<Scalar>(1) - expected * predicted
    let clamped = max(margins, Tensor(0))
    return clamped.mean()
}

/// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
///
/// - Parameters:
Expand Down
11 changes: 11 additions & 0 deletions Tests/TensorFlowTests/LossTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,17 @@ final class LossTests: XCTestCase {
assertElementsEqual(expected: expectedGradients, actual: gradients)
}

// Checks `hingeLoss` against a hand-computed reference:
// mean(max(0, 1 - expected * predicted)) == 0.225 for these fixtures.
func testHingeLoss() {
    let scores = Tensor<Float>(shape: [2, 4], scalars: [1, 2, 3, 4, 5, 6, 7, 8])
    let targets = Tensor<Float>(
        shape: [2, 4],
        scalars: [0.1, 0.2, 0.3, 0.4, 0.4, 0.3, 0.2, 0.1])

    let actualLoss = hingeLoss(predicted: scores, expected: targets)
    let referenceLoss: Float = 0.225
    assertElementsEqual(expected: Tensor(referenceLoss), actual: actualLoss)
}

func testSoftmaxCrossEntropyWithProbabilitiesLoss() {
let logits = Tensor<Float>(shape: [2, 4], scalars: [1, 2, 3, 4, 5, 6, 7, 8])
let labels = Tensor<Float>(
Expand Down