
work around duplicate definition of diff witness #234

Merged: 1 commit, Dec 2, 2019
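Every change in this commit applies the same pattern: the activation argument of each Conv2D (and of LeNet's final Dense layer) is spelled out explicitly as identity rather than left to the default argument, to work around the duplicate "diff witness" (differentiation witness) definition mentioned in the title. Below is a minimal sketch of that pattern, assuming a Swift for TensorFlow toolchain; the ConvBlock type and the shapes are illustrative and are not code from this PR.

import TensorFlow

// Illustrative sketch only; not code from this PR.
struct ConvBlock: Layer {
    var conv: Conv2D<Float>
    var norm: BatchNorm<Float>

    init(filterShape: (Int, Int, Int, Int)) {
        // Before: the default activation argument was relied on implicitly, e.g.
        //     self.conv = Conv2D(filterShape: filterShape, strides: (1, 1), padding: .same)
        // After: identity is passed explicitly, as this commit does throughout.
        self.conv = Conv2D(
            filterShape: filterShape, strides: (1, 1), padding: .same, activation: identity)
        self.norm = BatchNorm(featureCount: filterShape.3)
    }

    @differentiable
    func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
        return relu(norm(conv(input)))
    }
}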
MiniGo/Models/GoModel.swift (2 additions, 1 deletion)
@@ -53,7 +53,8 @@ struct ConvBN: Layer {
     ) {
         // TODO(jekbradbury): thread through bias and affine boolean arguments
         // (behavior is correct for inference but this should be changed for training)
-        self.conv = Conv2D(filterShape: filterShape, strides: strides, padding: padding)
+        self.conv = Conv2D(
+            filterShape: filterShape, strides: strides, padding: padding, activation: identity)
         self.norm = BatchNorm(featureCount: filterShape.3, momentum: 0.95, epsilon: 1e-5)
     }

Models/ImageClassification/DenseNet121.swift (2 additions, 1 deletion)
@@ -71,7 +71,8 @@ extension DenseNet121 {
         conv = Conv2D(
             filterShape: (filterSize, filterSize, inputFilterCount, outputFilterCount),
             strides: (stride, stride),
-            padding: .same
+            padding: .same,
+            activation: identity
         )
     }

Models/ImageClassification/LeNet-5.swift (1 addition, 1 deletion)
@@ -30,7 +30,7 @@ public struct LeNet: Layer {
     public var flatten = Flatten<Float>()
     public var fc1 = Dense<Float>(inputSize: 400, outputSize: 120, activation: relu)
     public var fc2 = Dense<Float>(inputSize: 120, outputSize: 84, activation: relu)
-    public var fc3 = Dense<Float>(inputSize: 84, outputSize: 10)
+    public var fc3 = Dense<Float>(inputSize: 84, outputSize: 10, activation: identity)

     public init() {}

Models/ImageClassification/ResNet50.swift (2 additions, 1 deletion)
@@ -34,7 +34,8 @@ public struct ConvBN: Layer {
         strides: (Int, Int) = (1, 1),
         padding: Padding = .valid
     ) {
-        self.conv = Conv2D(filterShape: filterShape, strides: strides, padding: padding)
+        self.conv = Conv2D(
+            filterShape: filterShape, strides: strides, padding: padding, activation: identity)
         self.norm = BatchNorm(featureCount: filterShape.3)
     }

Models/ImageClassification/ResNetV2.swift (2 additions, 1 deletion)
@@ -30,7 +30,8 @@ public struct Conv2DBatchNorm: Layer {
         strides: (Int, Int) = (1, 1),
         padding: Padding = .valid
     ) {
-        self.conv = Conv2D(filterShape: filterShape, strides: strides, padding: padding)
+        self.conv = Conv2D(
+            filterShape: filterShape, strides: strides, padding: padding, activation: identity)
         self.norm = BatchNorm(featureCount: filterShape.3)
     }

Models/ImageClassification/WideResNet.swift (8 additions, 4 deletions)
@@ -39,14 +39,17 @@ public struct BatchNormConv2DBlock: Layer {
         self.conv1 = Conv2D(
             filterShape: (kernelSize, kernelSize, featureCounts.0, featureCounts.1),
             strides: strides,
-            padding: padding)
+            padding: padding,
+            activation: identity)
         self.norm2 = BatchNorm(featureCount: featureCounts.1)
         self.conv2 = Conv2D(filterShape: (kernelSize, kernelSize, featureCounts.1, featureCounts.1),
             strides: (1, 1),
-            padding: padding)
+            padding: padding,
+            activation: identity)
         self.shortcut = Conv2D(filterShape: (1, 1, featureCounts.0, featureCounts.1),
             strides: strides,
-            padding: padding)
+            padding: padding,
+            activation: identity)
         self.isExpansion = featureCounts.1 != featureCounts.0 || strides != (1, 1)
     }

@@ -102,7 +105,8 @@ public struct WideResNet: Layer {
     public var classifier: Dense<Float>

     public init(depthFactor: Int = 2, widenFactor: Int = 8) {
-        self.l1 = Conv2D(filterShape: (3, 3, 3, 16), strides: (1, 1), padding: .same)
+        self.l1 = Conv2D(
+            filterShape: (3, 3, 3, 16), strides: (1, 1), padding: .same, activation: identity)

         self.l2 = WideResNetBasicBlock(
             featureCounts: (16, 16 * widenFactor), depthFactor: depthFactor, initialStride: (1, 1))