Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit 88ebf99

Browse files
tanmayb123 authored and rxwei committed
Add 1D version of UpSampling (#61)
1 parent d87fab3 commit 88ebf99

File tree

1 file changed

+29
-0
lines changed

1 file changed

+29
-0
lines changed

Sources/DeepLearning/Layer.swift

Lines changed: 29 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1011,6 +1011,35 @@ public struct Dropout<Scalar: TensorFlowFloatingPoint>: Layer {
10111011
}
10121012
}
10131013

/// An upsampling layer for 1-D inputs.
///
/// Repeats each timestep `size` times along the temporal axis, turning an
/// input of shape `[batch, timesteps, channels]` into
/// `[batch, timesteps * size, channels]`.
@_fixed_layout
public struct UpSampling1D<Scalar: TensorFlowFloatingPoint>: Layer {
    /// The upsampling factor; not a trainable parameter, hence `@noDerivative`.
    @noDerivative public let size: Int32

    /// Creates an upsampling layer.
    ///
    /// - Parameter size: The upsampling factor for timesteps.
    public init(size: Int32) {
        self.size = size
    }

    /// Returns the output obtained from applying the layer to the given input.
    ///
    /// - Parameters:
    ///   - input: The input to the layer, expected rank 3:
    ///     `[batch, timesteps, channels]`.
    ///   - context: The contextual information for the layer application, e.g. the current learning
    ///     phase (unused here).
    /// - Returns: The upsampled output of shape `[batch, timesteps * size, channels]`.
    @differentiable
    public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
        let (batchSize, timesteps, channels) =
            (input.shape[0], input.shape[1], input.shape[2])
        // Insert a singleton axis after the time axis, then broadcast-multiply
        // by ones of extent `size` to replicate each timestep.
        let broadcastOnes = Tensor<Scalar>(ones: [1, 1, size, 1])
        let repeated = input.reshaped(to: [batchSize, timesteps, 1, channels]) * broadcastOnes
        // Collapse the replication axis back into the time axis.
        return repeated.reshaped(to: [batchSize, timesteps * size, channels])
    }
}
10141043
/// An upsampling layer for 2-D inputs.
10151044
@_fixed_layout
10161045
public struct UpSampling2D<Scalar: TensorFlowFloatingPoint>: Layer {

0 commit comments

Comments
 (0)