Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Adding Upsampling 3D layer and tests for upsampling layers #112

Merged
merged 11 commits into from
Apr 25, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 30 additions & 1 deletion Sources/DeepLearning/Layer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1169,6 +1169,35 @@ public struct UpSampling2D<Scalar: TensorFlowFloatingPoint>: Layer {
}
}

/// An upsampling layer for 3-D inputs.
@_fixed_layout
public struct UpSampling3D<Scalar: TensorFlowFloatingPoint>: Layer {
    /// The integer factor by which every spatial dimension is enlarged.
    @noDerivative public let size: Int

    /// Creates an upsampling layer.
    ///
    /// - Parameter size: The upsampling factor for rows, columns, and depth.
    public init(size: Int) {
        self.size = size
    }

    /// Returns the output obtained from applying the layer to the given input.
    ///
    /// - Parameter input: A rank-5 tensor with shape
    ///   `[batch, height, width, depth, channels]`.
    /// - Returns: The upsampled tensor with shape
    ///   `[batch, height * size, width * size, depth * size, channels]`.
    @differentiable
    public func call(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
        let shape = input.shape
        let (batchSize, height, width, depth, channels) =
            (shape[0], shape[1], shape[2], shape[3], shape[4])
        // Insert a unit axis after each spatial dimension, then broadcast-multiply
        // by ones of extent `size` so every scalar is replicated `size` times
        // along each spatial axis (nearest-neighbor upsampling).
        let scaleOnes = Tensor<Scalar>(ones: [1, 1, size, 1, size, 1, size, 1])
        let upSampling = input.reshaped(
            to: [batchSize, height, 1, width, 1, depth, 1, channels]) * scaleOnes
        return upSampling.reshaped(
            to: [batchSize, height * size, width * size, depth * size, channels])
    }
}

/// A flatten layer.
///
/// A flatten layer flattens the input when applied without affecting the batch size.
Expand Down Expand Up @@ -1409,7 +1438,7 @@ public struct RNN<Cell: RNNCell>: Layer {
public typealias Output = [Cell.TimeStepOutput]

public var cell: Cell

public init(_ cell: @autoclosure () -> Cell) {
self.cell = cell()
}
Expand Down
30 changes: 30 additions & 0 deletions Tests/DeepLearningTests/LayerTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,33 @@ final class LayerTests: XCTestCase {
XCTAssertEqual(output, expected)
}

func testUpSampling1D() {
    let size = 6
    let layer = UpSampling1D<Float>(size: size)
    let input = Tensor<Float>(shape: [1, 10, 1], scalars: (0..<10).map(Float.init))
    let output = layer.inferring(from: input)
    // Shape check: the temporal dimension must grow by `size`.
    let expectedShape = TensorShape([1, input.shape[1] * size, 1])
    XCTAssertEqual(output.shape, expectedShape)
    // Value check: each input scalar must be repeated `size` times in order.
    let expectedScalars = (0..<10).flatMap { Array(repeating: Float($0), count: size) }
    XCTAssertEqual(output.scalars, expectedScalars)
}

func testUpSampling2D() {
    let size = 6
    let layer = UpSampling2D<Float>(size: size)
    let input = Tensor<Float>(shape: [1, 3, 5, 1], scalars: (0..<15).map(Float.init))
    let output = layer.inferring(from: input)
    // Shape check: both spatial dimensions must grow by `size`.
    let expectedShape = TensorShape([1, input.shape[1] * size, input.shape[2] * size, 1])
    XCTAssertEqual(output.shape, expectedShape)
    // Value check: each scalar must be expanded into a `size` x `size` block.
    var expectedScalars: [Float] = []
    for row in 0..<3 {
        let expandedRow = (0..<5).flatMap {
            Array(repeating: Float(row * 5 + $0), count: size)
        }
        for _ in 0..<size { expectedScalars += expandedRow }
    }
    XCTAssertEqual(output.scalars, expectedScalars)
}

func testUpSampling3D() {
    let size = 6
    let layer = UpSampling3D<Float>(size: size)
    let input = Tensor<Float>(shape: [1, 4, 3, 2, 1], scalars: (0..<24).map(Float.init))
    let output = layer.inferring(from: input)
    // Shape check: all three spatial dimensions must grow by `size`.
    let expectedShape = TensorShape(
        [1, input.shape[1] * size, input.shape[2] * size, input.shape[3] * size, 1])
    XCTAssertEqual(output.shape, expectedShape)
    // Value check: each scalar must be expanded into a `size`^3 block
    // (nearest-neighbor replication along height, width, and depth).
    var expectedScalars: [Float] = []
    for h in 0..<4 {
        var plane: [Float] = []
        for w in 0..<3 {
            let line = (0..<2).flatMap {
                Array(repeating: Float((h * 3 + w) * 2 + $0), count: size)
            }
            for _ in 0..<size { plane += line }
        }
        for _ in 0..<size { expectedScalars += plane }
    }
    XCTAssertEqual(output.scalars, expectedScalars)
}

func testReshape() {
let layer = Reshape<Float>(shape: [10, 2, 1])
let input = Tensor(shape: [20, 1], scalars: (0..<20).map(Float.init))
Expand Down Expand Up @@ -127,6 +154,9 @@ final class LayerTests: XCTestCase {
("testGlobalAvgPool1D", testGlobalAvgPool1D),
("testGlobalAvgPool2D", testGlobalAvgPool2D),
("testGlobalAvgPool3D", testGlobalAvgPool3D),
("testUpSampling1D", testUpSampling1D),
("testUpSampling2D", testUpSampling2D),
("testUpSampling3D", testUpSampling3D),
("testReshape", testReshape),
("testFlatten", testFlatten),
("testSimpleRNNCell", testSimpleRNNCell),
Expand Down