py/torch_tensorrt/dynamo/conversion/impl/normalization
1 file changed, +8 −8 lines

@@ -48,16 +48,16 @@ def batch_norm(
     assert input.shape[1] != -1, "Channel dim can't be dynamic for batch norm."

     if weight is None:
-        weight = np.array(1.0)
+        weight = 1.0

     if bias is None:
-        bias = np.array(0.0)
+        bias = 0.0

     if running_mean is None:
-        running_mean = np.array(0.0)
+        running_mean = 0.0

     if running_var is None:
-        running_var = np.array(1.0)
+        running_var = 1.0

     scale = cast(torch.Tensor, to_numpy(weight)) / np.sqrt(
         cast(torch.Tensor, to_numpy(running_var)) + eps
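
Note: `batch_norm` can drop the `np.array(...)` wrappers because each default is routed through `to_numpy(...)` before use (see the `scale` computation in the trailing context). Below is a minimal sketch of the coercion this relies on; this `to_numpy` is a stand-in written for illustration, not the actual torch_tensorrt helper.

```python
from typing import Optional, Sequence, Union

import numpy as np
import torch


def to_numpy(
    value: Optional[Union[torch.Tensor, np.ndarray, int, float, Sequence]],
) -> Optional[np.ndarray]:
    """Coerce tensors, arrays, and plain Python scalars to np.ndarray."""
    if value is None or isinstance(value, np.ndarray):
        return value
    if isinstance(value, torch.Tensor):
        # Detach from any autograd graph and move to host memory first.
        return value.detach().cpu().numpy()
    # Plain scalars, like the 1.0 / 0.0 defaults above, become 0-d arrays.
    return np.array(value)


assert to_numpy(1.0).shape == ()              # scalar default -> 0-d array
assert to_numpy(torch.ones(3)).shape == (3,)  # tensor weight -> 1-d array
```
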
@@ -115,10 +115,10 @@ def layer_norm(
     )

     if weight is None:
-        weight = np.array(1.0)
+        weight = to_numpy(1.0)

     if bias is None:
-        bias = np.array(0.0)
+        bias = to_numpy(0.0)

     gamma = (
         weight.detach().cpu().float().numpy()
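
Note: `layer_norm`, by contrast, keeps an eager `to_numpy(...)` conversion for its defaults. Judging from the truncated `gamma = (...)` context above, the following code likely branches on the runtime type of `weight`, so the default must already be an ndarray rather than a plain float. The branch below is a reconstruction under that assumption, not the verbatim source:

```python
import numpy as np
import torch

# What to_numpy(1.0) would yield for the default weight: a 0-d ndarray.
weight = np.array(1.0)

# Assumed continuation of the gamma construction truncated above:
# torch.Tensor weights are detached to NumPy, anything else (including
# the 0-d ndarray default) is used as-is.
gamma = (
    weight.detach().cpu().float().numpy()
    if isinstance(weight, torch.Tensor)
    else weight
)
print(gamma.shape)  # () -- the 0-d default passes through unchanged
```
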
@@ -181,10 +181,10 @@ def layer_norm_no_plugin(
     )

     if weight is None:
-        weight = np.array(1.0)
+        weight = to_numpy(1.0)

     if bias is None:
-        bias = np.array(0.0)
+        bias = to_numpy(0.0)

     shape = weight.shape
     broadcasted_shape = (1,) * (len(input.shape) - len(shape)) + shape
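
Note: a plain-float default would break here, since `weight.shape` requires an array; the 0-d ndarray from `to_numpy(1.0)` has shape `()`, which the padding logic handles. A quick illustration of the `broadcasted_shape` arithmetic, with made-up shapes:

```python
# The weight shape is left-padded with ones up to the input rank, so both
# the 0-d default (shape ()) and a real per-channel weight broadcast
# against the input. Shapes below are hypothetical, for illustration only.
input_shape = (2, 8, 16, 16)  # e.g. an NCHW input

for shape in [(), (16,), (16, 16)]:
    broadcasted_shape = (1,) * (len(input_shape) - len(shape)) + shape
    print(shape, "->", broadcasted_shape)
# ()       -> (1, 1, 1, 1)
# (16,)    -> (1, 1, 1, 16)
# (16, 16) -> (1, 1, 16, 16)
```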