1 file changed: 3 additions, 3 deletions
@@ -53,7 +53,7 @@ public class Adam<Model: Layer>: Optimizer
     public var beta2: Float
     /// A small scalar added to the denominator to improve numerical stability.
     public var epsilon: Float
-    /// The weight decay.
+    /// The learning rate decay.
     public var decay: Float
     /// The current step.
     public var step: Int = 0
@@ -73,7 +73,7 @@ public class Adam<Model: Layer>: Optimizer
     precondition(learningRate >= 0, "Learning rate must be non-negative")
     precondition(0 <= beta1 && beta1 <= 1, "Beta parameter must be between 0 and 1")
     precondition(0 <= beta2 && beta2 <= 1, "Beta parameter must be between 0 and 1")
-    precondition(decay >= 0, "Weight decay must be non-negative")
+    precondition(decay >= 0, "Learning rate decay must be non-negative")

     self.learningRate = learningRate
     self.beta1 = beta1
@@ -216,7 +216,7 @@ public class SGD<Model: Layer>: Optimizer
     public var velocity: Model.AllDifferentiableVariables
     /// The set of steps taken.
     public var step: Int = 0
-
+
     public init(
         for model: __shared Model,
         learningRate: Float = 0.01,
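For context on why the doc comment is renamed: in optimizers like this one, `decay` shrinks the step size as training progresses, whereas "weight decay" would penalize the parameters themselves. The sketch below is only an illustration of that distinction, assuming the common time-based schedule (effective rate = learningRate / (1 + decay * step)); the actual update rule in this library may differ.

    // Illustrative only: time-based learning rate decay vs. weight decay.
    let learningRate: Float = 1e-3
    let decay: Float = 0.01
    let step = 100

    // Learning rate decay: the effective step size shrinks as steps accumulate.
    let effectiveLearningRate = learningRate / (1 + decay * Float(step))
    print(effectiveLearningRate)  // 0.0005 after 100 steps

    // Weight decay, by contrast, would scale the parameters directly each step,
    // e.g. weight *= (1 - learningRate * weightDecayCoefficient).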