This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit 7e2212e

Fix argument documentation (#654)
* Fix argument documentation: this decay is not weight decay, so it should be renamed to avoid confusion.
* Change argument documentation
* Change precondition
* Update SGD.swift
1 parent 3d4bbce commit 7e2212e

File tree: 2 files changed (+4 lines, -4 lines)

Sources/TensorFlow/Optimizers/MomentumBased.swift

Lines changed: 2 additions & 2 deletions
@@ -30,7 +30,7 @@ public class RMSProp<Model: Differentiable>: Optimizer
   public var rho: Float
   /// A small scalar added to the denominator to improve numerical stability.
   public var epsilon: Float
-  /// The weight decay.
+  /// The learning rate decay.
   public var decay: Float
   /// The step count.
   public var step: Float = 0
@@ -46,7 +46,7 @@ public class RMSProp<Model: Differentiable>: Optimizer
   ) {
     precondition(learningRate >= 0, "Learning rate must be non-negative")
     precondition(rho >= 0, "Rho must be non-negative")
-    precondition(decay >= 0, "Weight decay must be non-negative")
+    precondition(decay >= 0, "Learning rate decay must be non-negative")

     self.learningRate = learningRate
     self.rho = rho
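
Why the rename is correct: in these optimizers, `decay` shrinks the effective step size as `step` grows; it does not penalize the weights. A minimal sketch of that schedule in Swift, assuming the Keras-style inverse-time formula (the exact update body in swift-apis may differ):

    // Inverse-time learning rate decay: lr_t = lr / (1 + decay * t).
    // `effectiveLearningRate` is a hypothetical helper, for illustration only.
    func effectiveLearningRate(base: Float, decay: Float, step: Float) -> Float {
        base / (1 + decay * step)
    }

    // e.g. a base rate of 0.01 with decay 0.001 after 100 steps:
    let lr = effectiveLearningRate(base: 0.01, decay: 0.001, step: 100)  // ~0.00909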

Sources/TensorFlow/Optimizers/SGD.swift

Lines changed: 2 additions & 2 deletions
@@ -25,7 +25,7 @@ public class SGD<Model: Differentiable>: Optimizer
   /// The momentum factor. It accelerates stochastic gradient descent in the relevant direction
   /// and dampens oscillations.
   public var momentum: Float
-  /// The weight decay.
+  /// The learning rate decay.
   public var decay: Float
   /// Use Nesterov momentum if true.
   public var nesterov: Bool
@@ -43,7 +43,7 @@ public class SGD<Model: Differentiable>: Optimizer
   ) {
     precondition(learningRate >= 0, "Learning rate must be non-negative")
     precondition(momentum >= 0, "Momentum must be non-negative")
-    precondition(decay >= 0, "Weight decay must be non-negative")
+    precondition(decay >= 0, "Learning rate decay must be non-negative")

     self.learningRate = learningRate
     self.momentum = momentum
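
For contrast, true weight decay (what the old doc comment implied) shrinks the parameters themselves rather than the step size. A hypothetical sketch, not swift-apis code, showing the difference on a single parameter under plain SGD:

    let w: Float = 1.0              // a model parameter
    let g: Float = 0.5              // its gradient at the current step
    let lr: Float = 0.01
    let step: Float = 10

    // Learning rate decay (what `SGD.decay` documents after this commit):
    // the step size shrinks over time; the weight itself is untouched.
    let lrDecay: Float = 0.001
    let decayedLR = lr / (1 + lrDecay * step)
    let wAfterLRDecay = w - decayedLR * g

    // Weight decay (what the old doc comment wrongly suggested): an L2-style
    // penalty pulls the weight itself toward zero on every update.
    let weightDecay: Float = 0.01
    let wAfterWeightDecay = w - lr * (g + weightDecay * w)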
