File tree Expand file tree Collapse file tree 1 file changed: +7 / -2 lines changed
backends/xnnpack/test/ops Expand file tree Collapse file tree 1 file changed +7
-2
lines changed Original file line number Diff line number Diff line change @@ -24,17 +24,22 @@ def _test_softmax(self, inputs):
24
24
# as xnnpack only supports softmax on the last dimension.
25
25
valid_dims = [len(inputs[0]) - 1, -1]
26
26
27
+ dynamic_shape = {}
28
+ for i in range(len(inputs[0].shape)):
29
+ dynamic_shape[i] = torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
30
+ dynamic_shape = (dynamic_shape,)
31
+
27
32
for dim in valid_dims:
28
33
(
29
- Tester(self.Softmax(dim), inputs)
34
+ Tester(self.Softmax(dim), inputs, dynamic_shapes=dynamic_shape)
30
35
.export()
31
36
.check_count({"torch.ops.aten.softmax": 1})
32
37
.to_edge_transform_and_lower()
33
38
.check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
34
39
.check_not(["executorch_exir_dialects_edge__ops_aten__softmax_default"])
35
40
.to_executorch()
36
41
.serialize()
37
- .run_method_and_compare_outputs()
42
+ .run_method_and_compare_outputs(num_runs=5)
38
43
)
39
44
40
45
def test_fp16_softmax(self):
You can’t perform that action at this time.
0 commit comments