
Commit 1e57438

digantdesai authored and YIWENX14 committed
Dynamic shape testing for softmax
Differential Revision: D68586985
Pull Request resolved: #7914
1 parent 4cbabce commit 1e57438


backends/xnnpack/test/ops/test_softmax.py

Lines changed: 7 additions & 2 deletions
@@ -24,17 +24,22 @@ def _test_softmax(self, inputs):
         # as xnnpack only supports softmax on the last dimension.
         valid_dims = [len(inputs[0]) - 1, -1]
 
+        dynamic_shape = {}
+        for i in range(len(inputs[0].shape)):
+            dynamic_shape[i] = torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
+        dynamic_shape = (dynamic_shape,)
+
         for dim in valid_dims:
             (
-                Tester(self.Softmax(dim), inputs)
+                Tester(self.Softmax(dim), inputs, dynamic_shapes=dynamic_shape)
                 .export()
                 .check_count({"torch.ops.aten.softmax": 1})
                 .to_edge_transform_and_lower()
                 .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
                 .check_not(["executorch_exir_dialects_edge__ops_aten__softmax_default"])
                 .to_executorch()
                 .serialize()
-                .run_method_and_compare_outputs()
+                .run_method_and_compare_outputs(num_runs=5)
             )
 
     def test_fp16_softmax(self):
