Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit b62213a

Browse files
committed
Add workarounds for TF-25 and TF-189.
1 parent f84841c commit b62213a

File tree

2 files changed

+8
-6
lines changed

2 files changed

+8
-6
lines changed

Sources/DeepLearning/Layer.swift

Lines changed: 7 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -70,7 +70,7 @@ public struct Dense<Scalar>: Layer
7070

7171
public var weight: Tensor<Scalar>
7272
public var bias: Tensor<Scalar>
73-
public typealias Activation = @autodiff (Tensor<Scalar>) -> Tensor<Scalar>
73+
public typealias Activation = @differentiable (Tensor<Scalar>) -> Tensor<Scalar>
7474
@noDerivative public let activation: Activation
7575

7676
// FIXME(SR-9716): Remove this once the bug is fixed or worked around.
@@ -321,7 +321,8 @@ public struct Dropout<Scalar>: Layer
321321
Scalar.RawSignificand : FixedWidthInteger {
322322
@noDerivative public let probability: Double
323323
@noDerivative public let learningPhaseIndicator: LearningPhaseIndicator
324-
// Workaround for TF-8
324+
// Workaround for TF-189, making `Dropout` have a non-trivial parameter
325+
// convention.
325326
var _unused: Tensor<Scalar>
326327

327328
public init(
@@ -374,9 +375,10 @@ public struct UpSampling2D<Scalar>: Layer
374375

375376
@noDerivative public let size: Int32
376377

377-
public init(size: Int32) {
378-
self.size = size
379-
}
378+
// FIXME(TF-25): Uncomment when the derived conformances bug is fixed.
379+
// public init(size: Int32) {
380+
// self.size = size
381+
// }
380382

381383
@differentiable(wrt: (self, input))
382384
public func applied(to input: Tensor<Scalar>) -> Tensor<Scalar> {

Tests/DeepLearningTests/TrivialModelTests.swift

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -34,7 +34,7 @@ final class TrivialModelTests: XCTestCase {
3434
let x: Tensor<Float> = [[0, 0], [0, 1], [1, 0], [1, 1]]
3535
let y: Tensor<Float> = [0, 1, 1, 0]
3636
for _ in 0..<1000 {
37-
let (loss, 𝛁model) = classifier.valueWithGradient { classifier -> Tensor<Float> in
37+
let (_, 𝛁model) = classifier.valueWithGradient { classifier -> Tensor<Float> in
3838
let ŷ = classifier.applied(to: x)
3939
return meanSquaredError(predicted: ŷ, expected: y)
4040
}

0 commit comments

Comments (0)