File tree (8 files changed, +2 −130 lines):
FastaiNotebook_07_batchnorm/Sources/FastaiNotebook_07_batchnorm
FastaiNotebook_08_data_block/Sources/FastaiNotebook_08_data_block
FastaiNotebook_08a_heterogeneous_dictionary/Sources/FastaiNotebook_08a_heterogeneous_dictionary
FastaiNotebook_08c_data_block_generic/Sources/FastaiNotebook_08c_data_block_generic
FastaiNotebook_09_optimizer/Sources/FastaiNotebook_09_optimizer
FastaiNotebook_10_mixup_ls/Sources/FastaiNotebook_10_mixup_ls
FastaiNotebook_11_imagenette/Sources/FastaiNotebook_11_imagenette
Runnable11/Sources/Runnable11

@@ -26,30 +26,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@derivative(of: forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
-    @differentiable(vjp: gradForward)
+    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @usableFromInline
-    // @derivative(of: forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
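Across all eight targets this PR makes the same change: the hand-written VJP `gradForward` (registered via `@differentiable(vjp: gradForward)` or `@differentiating(forward)`) is deleted, and only the plain `@differentiable` attribute on `forward` is kept, so the compiler derives the pullback from the body of `forward` itself. A minimal sketch of the two spellings on a toy free function — not the notebooks' `FALayer` protocol — assuming a toolchain that ships the standalone `_Differentiation` module, where the attribute is written `@differentiable(reverse)` and custom derivatives are registered with `@derivative(of:)`:

import _Differentiation

// Kept pattern: a plain differentiable function; the compiler synthesizes the pullback.
@differentiable(reverse)
func forward(_ x: Float) -> Float {
    return x * x
}

// Removed pattern: a hand-written derivative registered for `forward`.
// (Older Swift for TensorFlow toolchains spelled the registration
// `@differentiating(forward)` or `@differentiable(vjp: gradForward)`.)
@derivative(of: forward)
func gradForward(_ x: Float) -> (value: Float, pullback: (Float) -> Float) {
    return (x * x, { v in 2 * x * v })
}

// Both spellings produce the same gradient; when a registration exists it takes
// precedence over the compiler-derived pullback.
let g = gradient(at: Float(3), of: forward)   // 6.0

Since `forwardTraining` and `forwardInference` are themselves differentiable, differentiating the `switch` in `forward` directly gives the same result as the deleted manual dispatch, which is why the custom VJP could be dropped.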
@@ -24,29 +24,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @differentiating(forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
@@ -24,29 +24,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @differentiating(forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
@@ -24,29 +24,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @differentiating(forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
@@ -24,29 +24,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @differentiating(forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
@@ -24,29 +24,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @differentiating(forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
@@ -24,29 +24,13 @@ public protocol LearningPhaseDependent: FALayer {
}

extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(input)
        case .inference: return forwardInference(input)
        }
    }
-
-    @differentiating(forward)
-    func gradForward(_ input: Input) ->
-        (value: Output, pullback: (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining($1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference($1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
@@ -22,28 +22,13 @@ public protocol LearningPhaseDependent: FALayer {
}

public extension LearningPhaseDependent {
-    // This `@differentiable` attribute is necessary, to tell the compiler that this satisfies the FALayer
-    // protocol requirement, even though there is a `@differentiating(forward)` method below.
-    // TODO: It seems nondeterministically necessary. Some subsequent notebooks import this successfully without it,
-    // some require it. Investigate.
-    @differentiable(vjp: gradForward)
+    @differentiable
    public func forward(_ input: Input) -> Output {
        switch Context.local.learningPhase {
        case .training: return forwardTraining(to: input)
        case .inference: return forwardInference(to: input)
        }
    }
-
-    func gradForward(_ input: Input) ->
-        (Output, (Self.Output.TangentVector) ->
-            (Self.TangentVector, Self.Input.TangentVector)) {
-        switch Context.local.learningPhase {
-        case .training:
-            return valueWithPullback(at: input) { $0.forwardTraining(to: $1) }
-        case .inference:
-            return valueWithPullback(at: input) { $0.forwardInference(to: $1) }
-        }
-    }
}

public protocol Norm: Layer where Input == Tensor<Scalar>, Output == Tensor<Scalar> {
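What the deleted `gradForward` did by hand — pick the pullback for the current learning phase — is exactly what compiler-derived differentiation of the `switch` already does. A minimal sketch under the same toolchain assumption as above; `Phase` and `phased` are hypothetical stand-ins for the notebooks' `Context.local.learningPhase` dispatch, used only for illustration:

import _Differentiation

enum Phase { case training, inference }

// Branching on a non-differentiable value is handled by the compiler-derived
// pullback: whichever branch executes is the one that gets differentiated.
@differentiable(reverse)
func phased(_ x: Float, phase: Phase) -> Float {
    switch phase {
    case .training: return 2 * x
    case .inference: return x
    }
}

print(gradient(at: Float(1)) { x in phased(x, phase: .training) })   // 2.0
print(gradient(at: Float(1)) { x in phased(x, phase: .inference) })  // 1.0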