@@ -99,30 +99,7 @@ def fn():
 
 
 ######################################################################
-# Sample Output:
-#
-# .. code-block:: bash
-#
-#    >>[DEBUG]:Recompiling function step in /data/users/mlazos/pytorch/torch/optim/adam.py:191
-#    >> triggered by the following guard failure(s):
-#    >>    - L['self'].param_groups[0]['lr'] == 0.003333333333333333
-#    >>[DEBUG]:Recompiling function step in /data/users/mlazos/pytorch/torch/optim/adam.py:191
-#    >> triggered by the following guard failure(s):
-#    >>    - L['self'].param_groups[0]['lr'] == 0.004666666666666667
-#    >>    - L['self'].param_groups[0]['lr'] == 0.003333333333333333
-#    >>[DEBUG]:Recompiling function step in /data/users/mlazos/pytorch/torch/optim/adam.py:191
-#    >> triggered by the following guard failure(s):
-#    >>    - L['self'].param_groups[0]['lr'] == 0.006000000000000001
-#    >>    - L['self'].param_groups[0]['lr'] == 0.004666666666666667
-#    >>    - L['self'].param_groups[0]['lr'] == 0.003333333333333333
-#    >>[DEBUG]:Recompiling function step in /data/users/mlazos/pytorch/torch/optim/adam.py:191
-#    >> triggered by the following guard failure(s):
-#    >>    - L['self'].param_groups[0]['lr'] == 0.007333333333333335
-#    >>    - L['self'].param_groups[0]['lr'] == 0.006000000000000001
-#    >>    - L['self'].param_groups[0]['lr'] == 0.004666666666666667
-#    >>    - L['self'].param_groups[0]['lr'] == 0.003333333333333333
-#
-# With this example, we can see that we recompile the optimizer 4 additional times
+# With this example, we can see that we recompile the optimizer a few times
 # due to the guard failure on the 'lr' in param_groups[0].
 
 ######################################################################
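
For context, not part of the diff: a minimal sketch of the setup the removed sample output came from, assuming the tutorial's Adam + LinearLR configuration with lr=0.01 (LinearLR's defaults, start_factor=1/3 over total_iters=5, yield exactly the lr values 0.00333..., 0.00466..., 0.006..., 0.00733... seen in the removed logs). Because the learning rate is a Python float, torch.compile bakes it in as a guarded constant, so each scheduler step fails the guard on L['self'].param_groups[0]['lr'] and forces a recompile.

import torch

model = torch.nn.Linear(4, 4)
# A Python-float lr is specialized into the compiled step and guarded on.
opt = torch.optim.Adam(model.parameters(), lr=0.01)
sched = torch.optim.lr_scheduler.LinearLR(opt)  # defaults: start_factor=1/3, total_iters=5

# Log recompile reasons ([DEBUG] lines like those in the removed sample output).
torch._logging.set_logs(recompiles=True)

@torch.compile
def fn():
    opt.step()

# Populate gradients once so opt.step() has work to do.
model(torch.randn(4, 4)).sum().backward()

for _ in range(5):
    fn()          # each new float lr value fails the guard and recompiles
    sched.step()

# Sketch of the usual fix (assumes tensor-lr support in recent PyTorch):
# wrap the lr in a tensor so it is an input to the graph rather than a
# guarded constant, letting the scheduler change it without recompiling.
# opt = torch.optim.Adam(model.parameters(), lr=torch.tensor(0.01))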