This repository was archived by the owner on Mar 30, 2022. It is now read-only.

Commit 787904e

[AutoDiff] Update method-style differential operators. (#395)
Use the top-level differential operators instead of the method-style ones, which were removed a full release cycle ago.
1 parent 1db911d commit 787904e
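
For reference, a minimal sketch of the before/after shapes of these operators, assuming a Swift for TensorFlow toolchain in which only the top-level forms remain; the names `dydx`, `dvdx`, and so on are illustrative, not from the notebooks.

import TensorFlow

let x: Float = 30

// Method-style form this commit removes from the notebooks (no longer compiles):
//   let dydx = x.gradient { x in x * x }

// Top-level replacements used in the updated cells:
let dydx = gradient(at: x) { x in x * x }                        // derivative of x * x, i.e. 2 * x
let (value, dvdx) = valueWithGradient(at: x) { x in x * x }      // (x * x, 2 * x)
let (result, pullback) = valueWithPullback(at: x) { x in x * x } // pullback(1) == 2 * x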

2 files changed, 6 insertions(+), 6 deletions(-)


docs/site/tutorials/custom_differentiation.ipynb

Lines changed: 4 additions & 4 deletions
@@ -204,7 +204,7 @@
 "outputs": [],
 "source": [
 "var x: Float = 30\n",
-"x.gradient { x -> Float in\n",
+"gradient(at: x) { x -> Float in\n",
 " // Print the partial derivative with respect to the result of `sin(x)`.\n",
 " let a = sin(x).withDerivative { print(\"∂+/∂sin = \\($0)\") } \n",
 " // Force the partial derivative with respect to `x` to be `0.5`.\n",
@@ -270,7 +270,7 @@
 "let y: Tensor<Float> = [0, 1, 1, 0]\n",
 "\n",
 "for _ in 0..<10 {\n",
-" let 𝛁model = classifier.gradient { classifier -> Tensor<Float> in\n",
+" let 𝛁model = gradient(at: classifier) { classifier -> Tensor<Float> in\n",
 " let ŷ = classifier(x).withDerivative { print(\"∂L/∂ŷ =\", $0) }\n",
 " let loss = (ŷ - y).squared().mean()\n",
 " print(\"Loss: \\(loss)\")\n",
@@ -378,7 +378,7 @@
 "}\n",
 "\n",
 "// Differentiate `f(x) = (cos(x))^2`.\n",
-"let (output, backprop) = input.valueWithPullback { input -> Float in\n",
+"let (output, backprop) = valueWithPullback(at: input) { input -> Float in\n",
 " return square(cos(input))\n",
 "}\n",
 "print(\"Running backpropagation...\")\n",
@@ -598,7 +598,7 @@
 " print(\"Starting training step \\(i)\")\n",
 " print(\" Running original computation...\")\n",
 " let (logits, backprop) = model.appliedForBackpropagation(to: x)\n",
-" let (loss, dL_dŷ) = logits.valueWithGradient { logits in\n",
+" let (loss, dL_dŷ) = valueWithGradient(at: logits) { logits in\n",
 " softmaxCrossEntropy(logits: logits, labels: y)\n",
 " }\n",
 " print(\" Loss: \\(loss)\")\n",

docs/site/tutorials/model_training_walkthrough.ipynb

Lines changed: 2 additions & 2 deletions
@@ -750,7 +750,7 @@
 },
 "outputs": [],
 "source": [
-"let (loss, grads) = model.valueWithGradient { model -> Tensor<Float> in\n",
+"let (loss, grads) = valueWithGradient(at: model) { model -> Tensor<Float> in\n",
 " let logits = model(firstTrainFeatures)\n",
 " return softmaxCrossEntropy(logits: logits, labels: firstTrainLabels)\n",
 "}\n",
@@ -871,7 +871,7 @@
 " var epochAccuracy: Float = 0\n",
 " var batchCount: Int = 0\n",
 " for batch in trainDataset {\n",
-" let (loss, grad) = model.valueWithGradient { (model: IrisModel) -> Tensor<Float> in\n",
+" let (loss, grad) = valueWithGradient(at: model) { (model: IrisModel) -> Tensor<Float> in\n",
 " let logits = model(batch.features)\n",
 " return softmaxCrossEntropy(logits: logits, labels: batch.labels)\n",
 " }\n",
