Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Add model summary #1067

Merged
merged 30 commits into from
Sep 9, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
73e0971
Initial annotations prototype
texasmichelle Apr 22, 2020
fb15e7a
Lint
texasmichelle Apr 22, 2020
aaa19cf
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle Apr 22, 2020
68d71dd
Merge branch 'master' into annotations
texasmichelle Apr 27, 2020
72c52d4
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle Apr 28, 2020
f15b0e3
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle Apr 29, 2020
2bb4ff8
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle Apr 30, 2020
ce6f4da
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle May 1, 2020
e86750d
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle May 4, 2020
b42f61f
Merge remote-tracking branch 'upstream/master' into annotations
texasmichelle May 5, 2020
c40d752
Merge remote-tracking branch 'upstream/master' into annotations2
texasmichelle Aug 6, 2020
954826c
Merge remote-tracking branch 'upstream/master' into annotations2
texasmichelle Aug 14, 2020
8e017f2
Merge remote-tracking branch 'upstream/master' into annotations2
texasmichelle Aug 25, 2020
db5b80b
Add Dan's workaround for SR-13455
texasmichelle Aug 27, 2020
c3d064c
Get tests running
texasmichelle Aug 28, 2020
b191067
Add docstrings
texasmichelle Aug 28, 2020
a2d7218
Add summary formatting
texasmichelle Aug 29, 2020
8168ede
lint
texasmichelle Aug 29, 2020
20aa0e9
Add docstring for `formatAnnotations()`
texasmichelle Aug 29, 2020
17a6f7c
Add additional layer types
texasmichelle Aug 29, 2020
bdcfd9a
Add Convolutional
texasmichelle Sep 2, 2020
c525148
Add Embedding
texasmichelle Sep 3, 2020
116433a
Lint
texasmichelle Sep 3, 2020
7f5f2d9
Replace concrete types with aliases
texasmichelle Sep 3, 2020
aa8b631
Move annotations to the end and add docstrings
texasmichelle Sep 3, 2020
c96ba98
Add documentation
texasmichelle Sep 3, 2020
77c16c1
Revert use of typealias
texasmichelle Sep 3, 2020
087044b
Remove typealiases
texasmichelle Sep 3, 2020
dee1afb
Remove last few typealias refs
texasmichelle Sep 3, 2020
249e3e9
Remove paste artifact
texasmichelle Sep 3, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 67 additions & 0 deletions Documentation/X10/SUMMARY.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# Model Summaries

A summary provides details about the architecture of a model, such as layer
types and shapes.

The design proposal can be found [here][design]. This
implementation is a work in progress, so please file an [Issue][new_issue]
with any enhancements you would like to see or problems you run into.

**Note:** Model summaries are currently supported on the X10 backend only.

## Viewing a model summary

Create an X10 device and model.

```swift
import TensorFlow

public struct MyModel: Layer {
public var dense1 = Dense<Float>(inputSize: 1, outputSize: 1)
public var dense2 = Dense<Float>(inputSize: 4, outputSize: 4)
public var dense3 = Dense<Float>(inputSize: 4, outputSize: 4)
public var flatten = Flatten<Float>()

@differentiable
public func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
let layer1 = dense1(input)
let layer2 = layer1.reshaped(to: [1, 4])
let layer3 = dense2(layer2)
let layer4 = dense3(layer3)
return flatten(layer4)
}
}

let device = Device.defaultXLA
let model0 = MyModel()
let model = MyModel(copying: model0, to: device)
```

Create an input tensor.

```swift
let input = Tensor<Float>(repeating: 1, shape: [1, 4, 1, 1], on: device)
```

Generate a summary of your model.

```swift
let summary = model.summary(input: input)
print(summary)
```

```
Layer Output Shape Attributes
=============================== ==================== ======================
Dense<Float> [1, 4, 1, 1]
Dense<Float> [1, 4]
Dense<Float> [1, 4]
Flatten<Float> [1, 4]
```

**Note:** The `summary()` function executes the model in order to obtain
details about its architecture.


[design]: https://docs.google.com/document/d/1hEhMiwLtuzsN3RvIC3FAh6NvtTimU8o_qdzMkGvntVg/view
[new_issue]: https://github.com/tensorflow/swift-apis/issues/new
131 changes: 70 additions & 61 deletions Sources/TensorFlow/Core/Tensor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -41,67 +41,6 @@ public struct Tensor<Scalar: TensorFlowScalar> {
}
}

public protocol TensorProtocol {
associatedtype Scalar: TensorFlowScalar
init(repeating repeatedValue: Scalar, shape: TensorShape, on device: Device)
var annotations: String { get }
var shape: TensorShape { get }
var summary: String { get }
}

public protocol DifferentiableTensorProtocol:
TensorProtocol & Differentiable & EuclideanDifferentiable
where Scalar: TensorFlowFloatingPoint {
@differentiable(wrt: self)
func annotate(_ annotation: String) -> Self
}

extension Tensor: TensorProtocol & DifferentiableTensorProtocol
where Scalar: TensorFlowFloatingPoint {

public var annotations: String {
#if USING_X10_BACKEND
switch handle.backend {
case .XLA:
let rawAnnotations = XLATensor.annotations(xlaTensor)

// TODO(michellecasbon): Add formatting.

return rawAnnotations

case .TF_EAGER:
return Device.defaultTFEager.annotationsAvailable
}
#else
return "Annotations not available in TF_EAGER."
#endif
}

public var summary: String { annotations }

@differentiable(wrt: self)
public func annotate(_ annotation: String) -> Tensor<Scalar> {
#if USING_X10_BACKEND
switch handle.backend {
case .XLA:
return Tensor<Scalar>(_xla: XLATensor.annotate(xlaTensor, annotation))
case .TF_EAGER:
return self
}
#else
return self
#endif
}

@derivative(of: annotate)
@usableFromInline
func vjpAnnotate(_ annotation: String) -> (
value: Tensor<Scalar>, pullback: (Tensor<Scalar>) -> Tensor<Scalar>
) {
(annotate(annotation), { $0 })
}
}

extension Tensor: AnyTensor {
public var _rawTensorHandle: CTensorHandle { return handle._cTensorHandle }
public var _tensorFlowDataType: TensorDataType { return Scalar.tensorFlowDataType }
Expand Down Expand Up @@ -835,3 +774,73 @@ extension Tensor: Differentiable & EuclideanDifferentiable where Scalar: TensorF
}
}
#endif

//===------------------------------------------------------------------------------------------===//
// Annotations
//===------------------------------------------------------------------------------------------===//

/// A tensor-like type that can carry string annotations describing the
/// operations that produced it (used to build model summaries).
public protocol TensorProtocol {
  /// The scalar element type of the tensor.
  associatedtype Scalar: TensorFlowScalar
  /// Creates a tensor filled with `repeatedValue` of the given `shape` on `device`.
  init(repeating repeatedValue: Scalar, shape: TensorShape, on device: Device)
  /// The raw annotations collected for this tensor.
  var annotations: String { get }
  /// The shape of the tensor.
  var shape: TensorShape { get }
  /// A textual summary derived from `annotations`.
  var summary: String { get }
}

/// A differentiable tensor type that supports attaching annotations in a
/// differentiation-transparent way (the annotation must not affect gradients).
public protocol DifferentiableTensorProtocol:
  TensorProtocol & Differentiable & EuclideanDifferentiable
where Scalar: TensorFlowFloatingPoint {
  /// Returns `self` with `annotation` attached.
  @differentiable(wrt: self)
  func annotate(_ annotation: String) -> Self
}

extension Tensor: TensorProtocol {
  /// The annotations describing this tensor.
  ///
  /// On the X10 (XLA) backend this returns the annotation records collected
  /// for the tensor; on the TF eager backend it returns a fixed
  /// "annotations not available" message instead.
  public var annotations: String {
    #if USING_X10_BACKEND
      switch handle.backend {
      case .XLA:
        return XLATensor.annotations(xlaTensor)
      case .TF_EAGER:
        // TF eager does not record annotations; report the availability
        // message provided by the default eager device.
        return Device.defaultTFEager.annotationsAvailable
      }
    #else
      return "Annotations not available in TF_EAGER."
    #endif
  }

  /// An alias for `annotations`.
  public var summary: String { annotations }
}

extension Tensor: DifferentiableTensorProtocol
where Scalar: TensorFlowFloatingPoint {
  /// Adds an annotation.
  ///
  /// Note: Only X10 is supported. For other backends, unmodified `self` is
  /// returned.
  ///
  /// - Parameter annotation: The annotation to be added.
  /// - Returns: The annotated tensor.
  @differentiable(wrt: self)
  public func annotate(_ annotation: String) -> Tensor<Scalar> {
    #if USING_X10_BACKEND
      switch handle.backend {
      case .XLA:
        return Tensor<Scalar>(_xla: XLATensor.annotate(xlaTensor, annotation))
      case .TF_EAGER:
        // Annotations are not supported on TF eager; return self unchanged.
        return self
      }
    #else
      return self
    #endif
  }

  /// The custom derivative of `annotate(_:)`.
  ///
  /// Annotating does not change the tensor's values, so the pullback is the
  /// identity function.
  @derivative(of: annotate)
  @usableFromInline
  func vjpAnnotate(_ annotation: String) -> (
    value: Tensor<Scalar>, pullback: (Tensor<Scalar>) -> Tensor<Scalar>
  ) {
    (annotate(annotation), { $0 })
  }
}
137 changes: 137 additions & 0 deletions Sources/TensorFlow/Layer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation
import _Differentiation

public protocol Module: EuclideanDifferentiable, KeyPathIterable
Expand All @@ -20,6 +21,7 @@ where
{
/// The input type of the layer.
associatedtype Input

/// The output type of the layer.
associatedtype Output: Differentiable

Expand All @@ -29,6 +31,119 @@ where
/// - Returns: The output.
@differentiable(wrt: self)
func callAsFunction(_ input: Input) -> Output

/// Returns the output obtained from applying the layer to the given input.
///
/// - Parameter input: The input to the layer.
/// - Returns: The output.
@differentiable(wrt: self)
func forward(_ input: Input) -> Output
}

extension Module {
  /// Returns the output obtained from applying the layer to the given input.
  ///
  /// This default implementation simply delegates to `callAsFunction(_:)`,
  /// so conforming types only need to override `forward(_:)` when they want
  /// behavior distinct from direct invocation.
  ///
  /// - Parameter input: The input to the layer.
  /// - Returns: The output.
  @differentiable(wrt: self)
  public func forward(_ input: Input) -> Output {
    callAsFunction(input)
  }
}

extension Module where Input: TensorProtocol, Output: DifferentiableTensorProtocol {
  /// Returns the annotated output obtained from applying the layer to the
  /// given input.
  ///
  /// - Parameter input: The input to the layer.
  /// - Returns: The annotated output.
  @differentiable(wrt: self)
  public func callAsFunction(_ input: Input) -> Output {
    let activation = forward(input)
    return annotated(activation)
  }

  /// Annotates `output` with this layer's concrete type name.
  ///
  /// Note: Returns `output` unmodified if using a backend that does not
  /// support annotations.
  ///
  /// - Parameter output: The output to the layer.
  /// - Returns: The annotated output.
  @differentiable
  public func annotated(_ output: Output) -> Output {
    #if USING_X10_BACKEND
      // Records e.g. "type=Dense<Float>" on the output tensor; these records
      // are later parsed by `formatAnnotations(from:)`.
      let annotated = output.annotate("type=\(Self.self)")
      return annotated
    #else
      return output
    #endif
  }

  /// Returns the annotations obtained from applying the layer to the given input.
  ///
  /// Note: This executes the model on `input` in order to collect annotations.
  ///
  /// - Parameter input: The input to the layer.
  /// - Returns: All collected annotations from the XLA graph.
  public func summary(input: Input) -> String {
    let output = self.callAsFunction(input)
    return formatAnnotations(from: output)
  }

  /// Returns a formatted version of `tensor.annotations`.
  ///
  /// - Parameter tensor: The output to the layer.
  /// - Returns: A formatted summary of `tensor.annotations`.
  private func formatAnnotations(from tensor: Output) -> String {
    #if USING_X10_BACKEND
      let rawAnnotations = tensor.annotations
      // The TF eager backend yields a fixed "annotations unavailable"
      // message; pass it through unchanged rather than formatting it.
      if rawAnnotations == Device.defaultTFEager.annotationsAvailable {
        return rawAnnotations
      }

      let lines = rawAnnotations.components(separatedBy: "\n")

      // Fewer than three lines means there are no layer entries to format.
      if lines.count < 3 {
        return ""
      }

      // Isolate layers.
      // Each annotation line is expected to contain
      //   "shape=<shape> type=<LayerType> [<key>=<value> ...]"
      // Capture groups: 1 = shape, 2 = layer type, 3 = optional attributes.
      let pattern = "\\s*shape=(.+)\\s+type=([^\\s]+)(\\s+.+=.+)?$"
      // The pattern is a compile-time constant, so force-try cannot fail at
      // runtime.
      let regex = try! NSRegularExpression(pattern: pattern)
      let contents = lines.filter { $0.contains("shape=") }
        .map { line -> String in
          let nsrange = NSRange(line.startIndex..., in: line)
          if let match = regex.firstMatch(in: line, range: nsrange) {
            // Reorder captures as: type, shape, attributes, separated by
            // tabs to approximate the column layout of the header below.
            var content = ""
            if let typeRange = Range(match.range(at: 2), in: line) {
              let type = line[typeRange]
              content += type
            }
            content += "\t\t\t"
            if let shapeRange = Range(match.range(at: 1), in: line) {
              let shape = line[shapeRange]
              content += shape
            }
            content += "\t\t"
            if let attributesRange = Range(match.range(at: 3), in: line) {
              let attribute = line[attributesRange]
              content += attribute
            }
            return content
          } else {
            // Keep lines the regex cannot parse as-is rather than dropping
            // them, so no information is silently lost.
            return line
          }
        }

      let formattedAnnotations = """
        Layer Output Shape Attributes
        =============================== ==================== ======================
        \(contents.joined(separator: "\n"))
        """

      return formattedAnnotations
    #else
      return tensor.annotations
    #endif
  }
}

/// A neural network layer.
Expand All @@ -45,6 +160,28 @@ public protocol Layer: Module where Input: Differentiable {
/// - Returns: The output.
@differentiable
func callAsFunction(_ input: Input) -> Output

@differentiable
func forward(_ input: Input) -> Output
}

extension Layer {
  /// Returns the output obtained from applying the layer to the given input.
  ///
  /// - Parameter input: The input to the layer.
  /// - Returns: The output.
  // Workaround for SR-13455: autodiff undefined symbol linker error.
  // The duplicate `@differentiable` attributes are intentional; do not remove.
  @differentiable(wrt: self)
  @differentiable
  public func forward(_ input: Input) -> Output {
    return callAsFunction(input)
  }
}

extension Layer where Input: DifferentiableTensorProtocol, Output: DifferentiableTensorProtocol {
  /// Returns the annotated output obtained from applying the layer to the
  /// given input.
  ///
  /// - Parameter input: The input to the layer.
  /// - Returns: The annotated output.
  // Workaround for SR-13455: autodiff undefined symbol linker error.
  // The duplicate `@differentiable` attributes are intentional; do not remove.
  @differentiable(wrt: self)
  @differentiable
  public func callAsFunction(_ input: Input) -> Output {
    let activation = forward(input)
    return annotated(activation)
  }
}

/// An empty struct representing empty `TangentVector`s for parameterless layers.
Expand Down
Loading