Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Refactor cosine similarity and add cosine distance #240

Merged
merged 11 commits into from
Jun 16, 2019
13 changes: 0 additions & 13 deletions Sources/TensorFlow/Loss.swift
Original file line number Diff line number Diff line change
Expand Up @@ -102,19 +102,6 @@ public func hingeLoss<Scalar: TensorFlowFloatingPoint>(
return max(Tensor(1) - expected * predicted, Tensor(0)).mean()
}

/// Returns the *negated* cosine similarity between predictions and expectations.
///
/// NOTE(review): despite the name, this returns `-cos(theta)` so that the value
/// can be minimized as a loss — identical directions yield `-1`, opposite
/// directions yield `+1` (see `testCosineSimilarityLoss`, which expects `-1.0`
/// for parallel vectors). Callers wanting the mathematical cosine similarity
/// must negate the result.
///
/// - Parameters:
///   - predicted: Predicted outputs from a neural network.
///   - expected: Expected values, i.e. targets, that correspond to the correct output.
@differentiable(wrt: (predicted, expected))
public func cosineSimilarity<Scalar: TensorFlowFloatingPoint>(
    predicted: Tensor<Scalar>, expected: Tensor<Scalar>
) -> Tensor<Scalar> {
    return -(expected * predicted).sum() /
        (sqrt(expected.squared().sum()) * sqrt(predicted.squared().sum()))
}

/// Returns the squared hinge loss between predictions and expectations.
///
/// - Parameters:
Expand Down
17 changes: 17 additions & 0 deletions Sources/TensorFlow/Operators/Math.swift
Original file line number Diff line number Diff line change
Expand Up @@ -774,6 +774,23 @@ public func rsqrt<T: TensorFlowFloatingPoint>(_ x: Tensor<T>) -> Tensor<T> {
Raw.rsqrt(x)
}

/// Returns the cosine similarity between `x` and `y`.
///
/// Cosine similarity is the dot product of the two tensors divided by the
/// product of their Euclidean norms; for nonzero inputs the result lies in
/// `[-1, 1]`, reaching `1` when the tensors point in the same direction.
@differentiable
public func cosineSimilarity<Scalar: TensorFlowFloatingPoint>(
    _ x: Tensor<Scalar>, _ y: Tensor<Scalar>
) -> Tensor<Scalar> {
    let dotProduct = (x * y).sum()
    let xNorm = sqrt(x.squared().sum())
    let yNorm = sqrt(y.squared().sum())
    return dotProduct / (xNorm * yNorm)
}

/// Returns the cosine distance between `x` and `y`.
///
/// Cosine distance is defined as `1 - cosineSimilarity(x, y)`: it is `0` for
/// tensors pointing in the same direction and `2` for opposite directions.
@differentiable
public func cosineDistance<Scalar: TensorFlowFloatingPoint>(
    _ x: Tensor<Scalar>, _ y: Tensor<Scalar>
) -> Tensor<Scalar> {
    let similarity = cosineSimilarity(x, y)
    return 1 - similarity
}

@inlinable
internal func _vjpRsqrt<T: TensorFlowFloatingPoint>(
_ x: Tensor<T>
Expand Down
9 changes: 0 additions & 9 deletions Tests/TensorFlowTests/LossTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -103,14 +103,6 @@ final class LossTests: XCTestCase {
let expectedLoss: Float = 0.225
assertElementsEqual(expected: Tensor(expectedLoss), actual: loss)
}

// Parallel vectors (targets == 0.5 * outputs) must yield the minimum loss
// value of -1, since the loss is the negated cosine similarity.
func testCosineSimilarityLoss() {
    let outputs = Tensor<Float>([1, 2, 3, 4, 5, 6, 7, 8])
    let targets = Tensor<Float>([0.5, 1, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0])
    let loss = cosineSimilarity(predicted: outputs, expected: targets)
    let expectedLoss: Float = -1.0
    assertElementsEqual(expected: Tensor(expectedLoss), actual: loss)
}

func testSquaredHingeLoss() {
let predicted = Tensor<Float>([1, 2, 3, 4, 5, 6, 7, 8])
Expand Down Expand Up @@ -245,7 +237,6 @@ final class LossTests: XCTestCase {
("testHingeLoss", testHingeLoss),
("testKullbackLeiblerDivergence", testKullbackLeiblerDivergence),
("testCategoricalHingeLoss", testCategoricalHingeLoss),
("testCosineSimilarityLoss", testCosineSimilarityLoss),
("testSquaredHingeLoss", testSquaredHingeLoss),
("testPoissonLoss", testPoissonLoss),
("testSoftmaxCrossEntropyWithProbabilitiesLoss",
Expand Down
11 changes: 10 additions & 1 deletion Tests/TensorFlowTests/OperatorTests/MathTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,14 @@ final class MathOperatorTests: XCTestCase {
assertEqual(y, log(1 + x), accuracy: 0.0001)
}

// Checks cosine similarity and cosine distance on parallel vectors:
// y == 0.5 * x, so the similarity is 1 and the distance is 0.
// The distance assertion is implementation-agnostic (requested in review):
// it pins the value directly rather than deriving it from `z`, so it keeps
// protecting `cosineDistance` even if its implementation diverges from
// `1 - cosineSimilarity` in the future.
func testCosineSimilarity() {
    let x = Tensor<Float>([1, 2, 3, 4, 5, 6, 7, 8])
    let y = Tensor<Float>([0.5, 1, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0])
    let z = cosineSimilarity(x, y)
    let output: Float = 1.0
    XCTAssertEqual(z, Tensor(output))
    let distance = cosineDistance(x, y)
    let expectedDistance: Float = 0.0
    XCTAssertEqual(distance, Tensor(expectedDistance))
}

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could you please also add a test for cosine distance?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@saeta, actually cosine distance just returns `1 - cosineSimilarity`, so in effect we would be testing the same function. To avoid redundancy, I didn't add a test.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Even though the function is implemented as a trivial wrapper, it's good practice to assume the implementation can change in the future for whatever reasons and have tests be implementation-agnostic.

// FIXME(https://bugs.swift.org/browse/TF-543): Disable failing test.
/*
func testExpm1() {
Expand Down Expand Up @@ -118,7 +126,7 @@ final class MathOperatorTests: XCTestCase {
x.variance(squeezingAxes: 0),
Tensor(shape: [5], scalars: [0, 0, 0, 0, 0]))
XCTAssertEqual(
x.variance(alongAxes: 0),
x.variance(alongAxes: 0),
Tensor(shape: [5], scalars: [0, 0, 0, 0, 0]))
XCTAssertEqual(
x.variance(squeezingAxes: 1),
Expand Down Expand Up @@ -280,6 +288,7 @@ final class MathOperatorTests: XCTestCase {
// ("testExpm1", testExpm1),
("testSign", testSign),
("testReduction", testReduction),
("testCosineSimilarity", testCosineSimilarity),
("testArgmax", testArgmax),
("testCeilAndFloor", testCeilAndFloor),
("testSimpleMath", testSimpleMath),
Expand Down