Commit b0b6f38

Fix failed TestNativeLayerNormConverter (#3315)
1 parent 8a81e3b

1 file changed: +4 -4 lines changed

tests/py/dynamo/conversion/test_layer_norm_aten.py

Lines changed: 4 additions & 4 deletions
@@ -22,7 +22,7 @@ def forward(self, x):
                 )[0]
 
         inputs = [torch.randn(input_shape)]
-        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True)
+        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True, enable_passes=True)
 
     @parameterized.expand(
         [
@@ -44,7 +44,7 @@ def forward(self, x, weight, bias):
             torch.randn(normalized_shape),
             torch.randn(normalized_shape),
         ]
-        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True)
+        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True, enable_passes=True)
 
     def test_layernorm_with_dynamic_shape(self):
         class LayerNorm(torch.nn.Module):
@@ -65,7 +65,7 @@ def forward(self, x, weight, bias):
         ]
 
         self.run_test_with_dynamic_shape(
-            LayerNorm(), input_specs, use_dynamo_tracer=True
+            LayerNorm(), input_specs, use_dynamo_tracer=True, enable_passes=True
         )
 
     def test_layernorm_with_dynamic_shape_1(self):
@@ -87,7 +87,7 @@ def forward(self, x, weight, bias):
         ]
 
         self.run_test_with_dynamic_shape(
-            LayerNorm(), input_specs, use_dynamo_tracer=True
+            LayerNorm(), input_specs, use_dynamo_tracer=True, enable_passes=True
        )
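
For context, below is a minimal sketch of what the first fixed test plausibly looks like after this commit. The harness import, the class and method names, the example shape, and the exact native_layer_norm arguments are assumptions reconstructed from the diff and the commit title; only the run_test call with use_dynamo_tracer=True and enable_passes=True appears verbatim in the change.

# Hedged reconstruction of the first test after this commit. The harness
# import, names, shape, and op arguments are assumptions; only the
# run_test(..., use_dynamo_tracer=True, enable_passes=True) call is from the diff.
import torch
from parameterized import parameterized

from harness import DispatchTestCase  # assumed harness import; actual path may differ


class TestNativeLayerNormConverter(DispatchTestCase):
    @parameterized.expand([((2, 4, 6),)])  # assumed example input shape
    def test_layer_norm(self, input_shape):
        class LayerNorm(torch.nn.Module):
            def forward(self, x):
                # native_layer_norm returns (output, mean, rstd); the test
                # keeps only the normalized output, hence the [0]
                return torch.ops.aten.native_layer_norm.default(
                    x, [input_shape[-1]], None, None, 1e-05
                )[0]

        inputs = [torch.randn(input_shape)]
        # enable_passes=True is the fix this commit applies: it presumably runs
        # the Dynamo lowering passes before conversion so the traced graph
        # matches what the converter test harness expects
        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True, enable_passes=True)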
