Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit f099fad

Browse files
jon-tow authored and rxwei committed
Update Loss docs to reflect non-mutating API (#232)
1 parent 3a256de commit f099fad

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

Sources/TensorFlow/Loss.swift

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,7 @@
1212
// See the License for the specific language governing permissions and
1313
// limitations under the License.
1414

15-
/// Computes the mean squared error between predictions and labels.
15+
/// Returns the mean squared error between predictions and labels.
1616
///
1717
/// - Parameters:
1818
/// - predicted: Predicted outputs from a neural network.
@@ -41,7 +41,7 @@ public func meanSquaredLogarithmicError<Scalar: TensorFlowFloatingPoint>(
4141
return (logPredicted - logExpected).squared().mean()
4242
}
4343

44-
/// Computes the mean absolute error between predictions and expectations.
44+
/// Returns the mean absolute error between predictions and expectations.
4545
///
4646
/// - Parameters:
4747
/// - predicted: Predicted outputs from a neural network.
@@ -115,7 +115,7 @@ public func kullbackLeiblerDivergence<Scalar: TensorFlowFloatingPoint>(
115115
return (expected * log(expected / predicted)).sum()
116116
}
117117

118-
/// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
118+
/// Returns the softmax cross entropy (categorical cross entropy) between logits and labels.
119119
///
120120
/// - Parameters:
121121
/// - logits: One-hot encoded outputs from a neural network.
@@ -136,7 +136,7 @@ func _vjpSoftmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
136136
return (loss.mean(), { v in (v / batchSize) * grad })
137137
}
138138

139-
/// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
139+
/// Returns the softmax cross entropy (categorical cross entropy) between logits and labels.
140140
///
141141
/// - Parameters:
142142
/// - logits: Unscaled log probabilities from a neural network.
@@ -158,7 +158,7 @@ func _vjpSoftmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
158158
return (loss.mean(), { v in v / batchSize * grad })
159159
}
160160

161-
/// Computes the sigmoid cross entropy (binary cross entropy) between logits and labels.
161+
/// Returns the sigmoid cross entropy (binary cross entropy) between logits and labels.
162162
///
163163
/// The reduction is reduced over all elements. If reduced over batch size is intended, please
164164
/// consider to scale the loss.

0 commit comments

Comments
 (0)