This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Added support for the 'log1mexp' op and its VJP. #147

Merged
29 commits merged on Jun 24, 2019
Changes from all commits (29 commits)
930cf5f
Enhanced the 'matmul' wrapper so that it matches the behavior of the …
eaplatanios May 30, 2019
a557090
Added support for the 'log1mexp' op and its VJP.
eaplatanios May 30, 2019
9e75132
Added a test.
eaplatanios May 30, 2019
571a301
Update Sources/TensorFlow/Operators/Math.swift
eaplatanios May 30, 2019
2131375
Removed the need for a general 'Tensor.withoutDerivative()' as Richar…
eaplatanios May 30, 2019
1e80a1e
Addressed Richard's feedback.
eaplatanios May 30, 2019
3b60a9e
Addressed Richard's feedback.
eaplatanios May 31, 2019
9ef8db8
Added one more test helper.
eaplatanios May 31, 2019
561a842
Minor bug fix.
eaplatanios May 31, 2019
670eabf
Merge branch 'matmul' into logm1exp
eaplatanios May 31, 2019
a01f161
Added a test for 'log1mexp'.
eaplatanios May 31, 2019
399aba6
Merge branch 'matmul' into log-sigmoid
eaplatanios May 31, 2019
a30c098
Added support for 'softplus' and 'logSigmoid'.
eaplatanios May 31, 2019
7b7585e
Minor tweak.
eaplatanios May 31, 2019
d33db18
Merged upstream changes.
eaplatanios May 31, 2019
a0384e7
Fixed some of the tests.
eaplatanios May 31, 2019
0fbac79
Made the tests pass.
eaplatanios May 31, 2019
9701780
Attempt at making 'log1mexp' differentiable.
eaplatanios Jun 1, 2019
795e2cf
Merged upstream changes.
eaplatanios Jun 20, 2019
8869b75
Merged upstream changes.
eaplatanios Jun 20, 2019
2b6a5ba
Enabled the 'logSigmoid' test.
eaplatanios Jun 20, 2019
12c96e8
Merged upstream changes.
eaplatanios Jun 20, 2019
7f260e2
Merged upstream changes.
eaplatanios Jun 21, 2019
f3ac105
Style edits.
eaplatanios Jun 21, 2019
475f30b
Style edits.
eaplatanios Jun 21, 2019
13e199f
Update Sources/TensorFlow/Operators/Math.swift
eaplatanios Jun 24, 2019
ae5e3d2
Update Sources/TensorFlow/Operators/Math.swift
eaplatanios Jun 24, 2019
aab66e7
Merge remote-tracking branch 'upstream/master' into logm1exp
eaplatanios Jun 24, 2019
51026e8
Merge branch 'logm1exp' of github.com:eaplatanios/swift-apis into log…
eaplatanios Jun 24, 2019
17 changes: 16 additions & 1 deletion Sources/TensorFlow/Operators/Math.swift
@@ -579,6 +579,21 @@ func _vjpLog1p<T: TensorFlowFloatingPoint>(
    (log1p(x), { v in Raw.xdivy(v, 1 + x) })
}
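For context on the hunk above (an editorial aside, not part of this PR's changes): the pullback scales the incoming cotangent `v` by d/dx log1p(x) = 1 / (1 + x). A minimal sketch of the same rule using plain division, assuming `v` is never zero exactly where `1 + x` is zero (TensorFlow's `xdivy` differs only in returning 0 where `v == 0`, which avoids NaNs at that boundary):

// Hypothetical equivalent of the pullback above, written with plain division.
func vjpLog1pSketch<T: TensorFlowFloatingPoint>(
    _ x: Tensor<T>
) -> (Tensor<T>, (Tensor<T>) -> Tensor<T>) {
    (log1p(x), { v in v / (1 + x) })
}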

/// Returns `log(1 - exp(x))` using a numerically stable approach.
///
/// - Note: The approach is shown in Equation 7 of:
/// https://cran.r-project.org/web/packages/Rmpfr/vignettes/log1mexp-note.pdf.
@inlinable
@differentiable
public func log1mexp<T: TensorFlowFloatingPoint>(_ x: Tensor<T>) -> Tensor<T> {
    let isTooSmall = withoutDerivative(at: x) { x in -x .< T(log(2.0)) }
    // Where `isTooSmall` holds, this first `replacing` is ultimately a no-op: the
    // final `replacing` below selects the `log(-expm1(x))` branch there. The
    // surrogate `-ones` only keeps the unselected branch, and hence its
    // derivative, free of NaNs.
    let ones = withoutDerivative(at: x) { x in Tensor(onesLike: x) }
    let xSafe = x.replacing(with: -ones, where: isTooSmall)
    return log1p(-exp(xSafe)).replacing(with: log(-expm1(x)), where: isTooSmall)
}
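A brief numeric illustration (editorial, with hypothetical values) of why the split at `-x .< log(2)` matters: near zero, `exp(x)` is close to 1, so `log1p(-exp(x))` loses precision to cancellation and the `log(-expm1(x))` branch is used; far below zero, `expm1(x)` is close to -1, so the `log1p(-exp(x))` branch is the stable one.

import TensorFlow

// Hypothetical example values, one per branch of the condition above.
let x = Tensor<Float>([-0.1, -10])
let y = log1mexp(x)
// x = -0.1: -x < log(2), so y = log(-expm1(-0.1)) ≈ -2.3522
// x = -10:  -x ≥ log(2), so y = log1p(-exp(-10)) ≈ -4.5401e-05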

/// Returns the sine of the specified tensor element-wise.
@inlinable
@differentiable(vjp: _vjpSin(_:))
@@ -912,7 +927,7 @@ internal func _vjpSigmoid<T: TensorFlowFloatingPoint>(
}

/// Returns the log-sigmoid of the specified tensor element-wise. Specifically,
/// `log(1 / (1 + exp(-x)))`. For numerical stability, we use `-softplus(-x)`.
@inlinable
@differentiable
public func logSigmoid<T: TensorFlowFloatingPoint>(_ x: Tensor<T>) -> Tensor<T> {
    -softplus(-x)
}
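An editorial sketch (hypothetical values, not part of the diff) of the stability claim in the doc comment: for large negative inputs, `sigmoid(x)` underflows to 0, so the naive formula yields `-inf`, while `-softplus(-x)` stays finite.

import TensorFlow

// Hypothetical comparison of the naive formula against the stable one.
let x = Tensor<Float>([-200])
let naive = log(1 / (1 + exp(-x)))  // exp(200) overflows to +inf, so this is log(0) = -inf.
let stable = logSigmoid(x)          // -softplus(200) ≈ -200.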
26 changes: 17 additions & 9 deletions Tests/TensorFlowTests/OperatorTests/MathTests.swift
@@ -66,6 +66,13 @@ final class MathOperatorTests: XCTestCase {
        assertEqual(y, expectedY, accuracy: 0.0001)
    }

    func testLog1mexp() {
        let x = Tensor<Float>([-1, -2, -3, -4, -5])
        let y = log1mexp(x)
        let expectedY = Tensor<Float>([-0.45868, -0.14541, -0.05107, -0.01849, -0.00676])
        assertEqual(y, expectedY, accuracy: 0.0001)
    }
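A hedged companion check (hypothetical, not part of this diff) for the other half of the PR, the VJP: the autodiff gradient of `log1mexp` should match the closed-form derivative d/dx log(1 - exp(x)) = -exp(x) / (1 - exp(x)). A sketch in the style of the surrounding tests:

    func testLog1mexpGradient() {
        // Hypothetical test: compare the autodiff gradient with the closed form.
        let x = Tensor<Float>([-1, -2, -3, -4, -5])
        let computedGradient = gradient(at: x) { log1mexp($0).sum() }
        let expectedGradient = -exp(x) / (1 - exp(x))
        assertEqual(computedGradient, expectedGradient, accuracy: 0.0001)
    }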

    func testExpm1() {
        let x = Tensor<Float>([1, 2, 3, 4, 5])
        let y = expm1(x)
@@ -350,19 +357,20 @@ final class MathOperatorTests: XCTestCase {
    }

    func testBroadcastedAddGradient() {
        func foo(_ x: Tensor<Float>, _ y: Tensor<Float>) -> Tensor<Float> {
            return (x + y).sum()
        }
        let x = Tensor<Float>(ones: [1, 2, 1, 4])
        let y = Tensor<Float>(ones: [4, 1, 3, 1])
        let (dx, dy) = gradient(at: x, y, in: foo)
        XCTAssertEqual(x.shape, dx.shape)
        XCTAssertEqual(y.shape, dy.shape)
    }
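For intuition (an editorial aside): broadcasting `[1, 2, 1, 4]` against `[4, 1, 3, 1]` yields shape `[4, 2, 3, 4]`, and the pullback of the broadcast sums the cotangent over each broadcast dimension. Every entry of `dx` above should therefore equal 4 × 3 = 12, which a hypothetical extra assertion could pin down:

        XCTAssertEqual(dx, Tensor<Float>(repeating: 12, shape: [1, 2, 1, 4]))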

    static var allTests = [
        ("testElementaryFunctions", testElementaryFunctions),
        ("testLog1p", testLog1p),
        ("testLog1mexp", testLog1mexp),
        ("testExpm1", testExpm1),
        ("testSign", testSign),
        ("testLogSigmoid", testLogSigmoid),