Dynamic shape testing for softmax
Differential Revision: D68586985

Pull Request resolved: pytorch#7914
digantdesai authored Jan 24, 2025
1 parent b522084 commit f465c08
Showing 1 changed file with 7 additions and 2 deletions:

backends/xnnpack/test/ops/test_softmax.py
@@ -24,17 +24,22 @@ def _test_softmax(self, inputs):
         # as xnnpack only supports softmax on the last dimension.
         valid_dims = [len(inputs[0]) - 1, -1]
 
+        dynamic_shape = {}
+        for i in range(len(inputs[0].shape)):
+            dynamic_shape[i] = torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
+        dynamic_shape = (dynamic_shape,)
+
         for dim in valid_dims:
             (
-                Tester(self.Softmax(dim), inputs)
+                Tester(self.Softmax(dim), inputs, dynamic_shapes=dynamic_shape)
                 .export()
                 .check_count({"torch.ops.aten.softmax": 1})
                 .to_edge_transform_and_lower()
                 .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
                 .check_not(["executorch_exir_dialects_edge__ops_aten__softmax_default"])
                 .to_executorch()
                 .serialize()
-                .run_method_and_compare_outputs()
+                .run_method_and_compare_outputs(num_runs=5)
             )
 
     def test_fp16_softmax(self):
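
For context, the dynamic_shapes argument threaded through the Tester above follows the standard torch.export spec: a tuple with one entry per example input, where each entry maps a dimension index to a torch.export.Dim carrying the allowed runtime range. Below is a minimal, standalone sketch of the same pattern outside the ExecuTorch Tester harness; the Softmax module and the concrete input sizes are illustrative assumptions, not part of this commit.

import torch

class Softmax(torch.nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.dim = dim

    def forward(self, x):
        # Softmax over the configured dimension; the XNNPACK backend only
        # lowers this op when it runs over the last dimension.
        return torch.nn.functional.softmax(x, dim=self.dim)

# Example input used to trace the graph (sizes chosen arbitrarily).
example_inputs = (torch.randn(3, 5, 7),)

# One named Dim per tensor dimension, each allowed to vary in [1, 100]
# at runtime -- the same spec the test constructs above.
dynamic_shape = {
    i: torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
    for i in range(example_inputs[0].dim())
}

# dynamic_shapes is a tuple with one spec per example input.
exported = torch.export.export(
    Softmax(dim=-1), example_inputs, dynamic_shapes=(dynamic_shape,)
)

# The exported program now accepts other sizes within the declared ranges,
# which is the behavior run_method_and_compare_outputs(num_runs=5)
# exercises across repeated runs in the test.
out = exported.module()(torch.randn(2, 4, 9))
assert out.shape == (2, 4, 9)

Marking every dimension dynamic gives the broadest coverage: even the softmax dimension itself may vary between runs, since the operator normalizes over whatever size it sees at runtime.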
