@@ -22,7 +22,7 @@ def forward(self, x):
                )[0]
 
        inputs = [torch.randn(input_shape)]
-        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True)
+        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True, enable_passes=True)
 
    @parameterized.expand(
        [
@@ -44,7 +44,7 @@ def forward(self, x, weight, bias):
            torch.randn(normalized_shape),
            torch.randn(normalized_shape),
        ]
-        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True)
+        self.run_test(LayerNorm(), inputs, use_dynamo_tracer=True, enable_passes=True)
 
    def test_layernorm_with_dynamic_shape(self):
        class LayerNorm(torch.nn.Module):
@@ -65,7 +65,7 @@ def forward(self, x, weight, bias):
        ]
 
        self.run_test_with_dynamic_shape(
-            LayerNorm(), input_specs, use_dynamo_tracer=True
+            LayerNorm(), input_specs, use_dynamo_tracer=True, enable_passes=True
        )
 
    def test_layernorm_with_dynamic_shape_1(self):
@@ -87,7 +87,7 @@ def forward(self, x, weight, bias):
        ]
 
        self.run_test_with_dynamic_shape(
-            LayerNorm(), input_specs, use_dynamo_tracer=True
+            LayerNorm(), input_specs, use_dynamo_tracer=True, enable_passes=True
        )
 
 