// See the License for the specific language governing permissions and
// limitations under the License.

- /// Computes the mean squared error between predictions and labels.
+ /// Returns the mean squared error between predictions and labels.
///
/// - Parameters:
///   - predicted: Predicted outputs from a neural network.
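As a side note (not part of the diff), a minimal sketch of what the mean squared error documented above computes, using only basic tensor operations; the input values are made up for illustration:

```swift
import TensorFlow

// Hypothetical values, purely for illustration.
let predicted = Tensor<Float>([0.9, 0.2, 0.7])
let expected = Tensor<Float>([1.0, 0.0, 1.0])

// Mean squared error: the mean of the element-wise squared differences.
let mse = (predicted - expected).squared().mean()
// ((-0.1)^2 + 0.2^2 + (-0.3)^2) / 3 ≈ 0.0467
```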
@@ -41,7 +41,7 @@ public func meanSquaredLogarithmicError<Scalar: TensorFlowFloatingPoint>(
    return (logPredicted - logExpected).squared().mean()
}

- /// Computes the mean absolute error between predictions and expectations.
+ /// Returns the mean absolute error between predictions and expectations.
///
/// - Parameters:
///   - predicted: Predicted outputs from a neural network.
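For the hunk above, a small sketch of the mean squared logarithmic error that the return line reduces, assuming the conventional log(x + 1) transform for `logPredicted` and `logExpected` (those lines are not shown in the diff):

```swift
import TensorFlow

// Hypothetical inputs, for illustration only.
let predicted = Tensor<Float>([2.0, 1.0, 0.5])
let expected = Tensor<Float>([1.5, 1.0, 1.0])

// Assumed MSLE definition: squared error of log(x + 1),
// reduced with .mean() as in the return line above.
let logPredicted = log(predicted + 1)
let logExpected = log(expected + 1)
let msle = (logPredicted - logExpected).squared().mean()
```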
@@ -115,7 +115,7 @@ public func kullbackLeiblerDivergence<Scalar: TensorFlowFloatingPoint>(
    return (expected * log(expected / predicted)).sum()
}

- /// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
+ /// Returns the softmax cross entropy (categorical cross entropy) between logits and labels.
///
/// - Parameters:
///   - logits: One-hot encoded outputs from a neural network.
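Likewise, a short sketch of the Kullback-Leibler divergence computed by the return line above, with made-up distributions:

```swift
import TensorFlow

// Two hypothetical discrete distributions, for illustration only.
let predicted = Tensor<Float>([0.25, 0.25, 0.5])
let expected = Tensor<Float>([0.2, 0.3, 0.5])

// KL(expected || predicted): the sum over elements of
// expected * log(expected / predicted), as in the return line above.
let kl = (expected * log(expected / predicted)).sum()
```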
@@ -136,7 +136,7 @@ func _vjpSoftmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
    return (loss.mean(), { v in (v / batchSize) * grad })
}

- /// Computes the softmax cross entropy (categorical cross entropy) between logits and labels.
+ /// Returns the softmax cross entropy (categorical cross entropy) between logits and labels.
///
/// - Parameters:
///   - logits: Unscaled log probabilities from a neural network.
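A brief note on the pullback above: because the forward pass returns `loss.mean()`, the incoming cotangent is divided by the batch size. A tiny sketch of that scaling, with hypothetical values:

```swift
import TensorFlow

// Hypothetical per-example losses for a batch of 4, for illustration only.
let loss = Tensor<Float>([0.5, 1.0, 0.25, 0.25])
let batchSize = Float(loss.shape[0])

// The forward pass returns the batch mean, so in the pullback every
// incoming cotangent is scaled by 1 / batchSize before multiplying the
// per-example gradient; this is the `{ v in (v / batchSize) * grad }`
// closure above.
let meanLoss = loss.mean()
let cotangentScale = 1 / batchSize  // 0.25 for this batch
```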
@@ -158,7 +158,7 @@ func _vjpSoftmaxCrossEntropy<Scalar: TensorFlowFloatingPoint>(
    return (loss.mean(), { v in v / batchSize * grad })
}

- /// Computes the sigmoid cross entropy (binary cross entropy) between logits and labels.
+ /// Returns the sigmoid cross entropy (binary cross entropy) between logits and labels.
///
/// The loss is reduced over all elements. If a reduction over the batch size is intended,
/// please consider scaling the loss accordingly.
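Since the reduction runs over all elements, callers who want a per-batch-element scale can rescale the result themselves. A hedged sketch, assuming a `sigmoidCrossEntropy(logits:labels:)` entry point consistent with the documentation above and made-up inputs:

```swift
import TensorFlow

// Hypothetical logits and 0/1 labels; both the values and the
// sigmoidCrossEntropy(logits:labels:) spelling are assumptions made
// for illustration, not taken from the diff.
let logits: Tensor<Float> = [[1.2, -0.4], [0.3, 2.1]]
let labels: Tensor<Float> = [[1, 0], [0, 1]]

let loss = sigmoidCrossEntropy(logits: logits, labels: labels)

// The reduction covers every element, so divide by the batch size
// if a per-batch-element average is what you actually want.
let batchSize = Float(logits.shape[0])
let scaledLoss = loss / batchSize
```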