
Commit 21066cc ("review changes"), parent 315ebf7

2 files changed: +7 -8 lines changed


Sources/TensorFlow/Loss.swift

Lines changed: 6 additions & 7 deletions
@@ -86,25 +86,24 @@ public func categoricalHingeLoss<Scalar: TensorFlowFloatingPoint>(
     return max(Tensor(0), negative - positive + Tensor(1))
 }
 
-/// Helper function for Logcosh
+// Helper function for Logcosh
 @differentiable(wrt: x)
-internal func logcosh<Scalar: TensorFlowFloatingPoint>(
+fileprivate func logCosh<Scalar: TensorFlowFloatingPoint>(
     x: Tensor<Scalar>
 ) -> Tensor<Scalar> {
-    let y = Tensor<Scalar>([2])
-    return x + softplus(Tensor(-2) * x) - log(y)
+    return x + softplus(Tensor(-2) * x) - log(Tensor(2))
 }
 
-/// Returns the Logcosh loss between predictions and expectations.
+/// Returns the logarithm of the hyperbolic cosine of the error between predictions and expectations.
 ///
 /// - Parameters:
 ///   - predicted: Predicted outputs from a neural network.
 ///   - expected: Expected values, i.e. targets, that correspond to the correct output.
 @differentiable(wrt: predicted)
-public func logcoshLoss<Scalar: TensorFlowFloatingPoint>(
+public func logCoshLoss<Scalar: TensorFlowFloatingPoint>(
     predicted: Tensor<Scalar>, expected: Tensor<Scalar>
 ) -> Tensor<Scalar> {
-    return (logcosh(x: predicted - expected)).mean()
+    return (logCosh(x: predicted - expected)).mean()
 }
 
 /// Returns the Poisson loss between predictions and expectations.
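Note: log cosh(x) = x + softplus(-2x) - log 2, so the rewritten helper computes log-cosh without ever forming cosh(x), which would overflow for large |x|, and logCoshLoss then takes the mean over all elements of log(cosh(predicted - expected)). The snippet below is a minimal usage sketch, not part of this commit, assuming `import TensorFlow` from swift-apis and the renamed `logCoshLoss` above:

import TensorFlow

// Inputs mirroring the test below; any two tensors of matching shape work.
let predicted = Tensor<Float>([0.2, 0.3, 0.4])
let expected = Tensor<Float>([1.0, 4.0, 3.0])

// Mean over all elements of log(cosh(predicted - expected)).
let loss = logCoshLoss(predicted: predicted, expected: expected)
print(loss)  // A scalar Tensor<Float>.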

Tests/TensorFlowTests/LossTests.swift

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ final class LossTests: XCTestCase {
         assertElementsEqual(expected: Tensor(expectedLoss), actual: loss)
     }
 
-    func testLogcoshLoss() {
+    func testLogCoshLoss() {
         let predicted = Tensor<Float>([0.2, 0.3, 0.4])
         let expected = Tensor<Float>([1.0, 4.0, 3.0])
         let loss = logcoshLoss(predicted: predicted, expected: expected)
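A quick sanity check, also not part of the commit, is to compare the stable form used by the helper against the direct definition log(cosh(x)); `cosh`, `log`, and `softplus` are existing elementwise ops in swift-apis, and the inputs reuse the test values above:

import TensorFlow

// Hypothetical cross-check: the stable form should agree with the direct
// definition log(cosh(x)) up to floating-point error.
let x = Tensor<Float>([0.2, 0.3, 0.4]) - Tensor<Float>([1.0, 4.0, 3.0])

let direct = log(cosh(x))
let stable = x + softplus(Tensor<Float>(-2) * x) - log(Tensor<Float>(2))

print(direct)  // Elementwise log-cosh of the error.
print(stable)  // Should match `direct` to within tolerance.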
