This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Adding GlobalMaxPooling 1D, 2D, 3D #75

Closed
wants to merge 3 commits
78 changes: 77 additions & 1 deletion Sources/DeepLearning/Layer.swift
@@ -1009,6 +1009,63 @@ public struct AvgPool2D<Scalar: TensorFlowFloatingPoint>: Layer {
}
}

/// A global max pooling layer for temporal data.
@_fixed_layout
public struct GlobalMaxPooling1D<Scalar: TensorFlowFloatingPoint>: Layer {
/// Creates a global max pooling layer.
public init() {}

/// Returns the output obtained from applying the layer to the given input.
///
/// - Parameters:
/// - input: The input to the layer.
/// - context: The contextual information for the layer application, e.g. the current learning
/// phase.
/// - Returns: The output.
@differentiable
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
return input.max(alongAxes: 1).reshaped(to: [input.shape[0], input.shape[2]])
}
}

/// A global max pooling layer for spatial data.
@_fixed_layout
public struct GlobalMaxPooling2D<Scalar: TensorFlowFloatingPoint>: Layer {
/// Creates a global max pooling layer.
public init() {}

/// Returns the output obtained from applying the layer to the given input.
///
/// - Parameters:
/// - input: The input to the layer.
/// - context: The contextual information for the layer application, e.g. the current learning
/// phase.
/// - Returns: The output.
@differentiable
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
return input.max(alongAxes: [1, 2]).reshaped(to: [input.shape[0], input.shape[3]])
}
}

/// A global max pooling layer for spatial and spatio-temporal data.
@_fixed_layout
public struct GlobalMaxPooling3D<Scalar: TensorFlowFloatingPoint>: Layer {
/// Creates a global max pooling layer.
public init() {}

/// Returns the output obtained from applying the layer to the given input.
///
/// - Parameters:
/// - input: The input to the layer.
/// - context: The contextual information for the layer application, e.g. the current learning
/// phase.
/// - Returns: The output.
@differentiable
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
return input.max(alongAxes: [1, 2, 3]).reshaped(to: [input.shape[0], input.shape[4]])
}
}
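
// Usage sketch (illustrative only, not part of this diff). The layer types above are
// exercised exactly as declared; the `Tensor(ones:)` and `Context(learningPhase:)`
// initializers are assumed here purely for demonstration.
//
// let context = Context(learningPhase: .inference)
//
// let maxPool1D = GlobalMaxPooling1D<Float>()
// let sequences = Tensor<Float>(ones: [8, 100, 16])   // [batch, time, channels]
// let pooled1D = maxPool1D.applied(to: sequences, in: context)   // shape: [8, 16]
//
// let maxPool2D = GlobalMaxPooling2D<Float>()
// let images = Tensor<Float>(ones: [8, 28, 28, 3])    // [batch, height, width, channels]
// let pooled2D = maxPool2D.applied(to: images, in: context)      // shape: [8, 3]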


/// A global average pooling layer for temporal data.
@_fixed_layout
@@ -1022,7 +1079,7 @@ public struct GlobalAveragePooling1D<Scalar: TensorFlowFloatingPoint>: Layer {
/// - input: The input to the layer.
/// - context: The contextual information for the layer application, e.g. the current learning
/// phase.
/// - Returns: The output
/// - Returns: The output.
@differentiable
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
return input.mean(alongAxes: 1).reshaped(to: [input.shape[0], input.shape[2]])
@@ -1048,6 +1105,25 @@ public struct GlobalAveragePooling2D<Scalar: TensorFlowFloatingPoint>: Layer {
}
}

/// A global average pooling layer for spatial and spatio-temporal data.
@_fixed_layout
public struct GlobalAveragePooling3D<Scalar: TensorFlowFloatingPoint>: Layer {
/// Creates a global average pooling layer.
public init() {}

/// Returns the output obtained from applying the layer to the given input.
///
/// - Parameters:
/// - input: The input to the layer.
/// - context: The contextual information for the layer application, e.g. the current learning
/// phase.
/// - Returns: The output.
@differentiable
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
return input.mean(alongAxes: [1, 2, 3]).reshaped(to: [input.shape[0], input.shape[4]])
}
}
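
// Usage sketch for the new 3-D average pooling layer (illustrative only, not part of
// this diff; same assumed `Tensor(ones:)` and `Context(learningPhase:)` initializers).
// A rank-5 input [batch, depth, height, width, channels] is reduced to [batch, channels].
//
// let avgPool3D = GlobalAveragePooling3D<Float>()
// let volumes = Tensor<Float>(ones: [4, 8, 16, 16, 2])
// let averaged = avgPool3D.applied(to: volumes, in: Context(learningPhase: .inference))
// // averaged.shape == [4, 2]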

/// A layer that applies layer normalization over a mini-batch of inputs.
///
/// Reference: [Layer Normalization](https://arxiv.org/abs/1607.06450).