From 1db3a5a2f424bfc871857c12116d6026b506d922 Mon Sep 17 00:00:00 2001
From: Max Podkorytov <4273004+tenpercent@users.noreply.github.com>
Date: Wed, 28 Feb 2024 19:23:24 +0000
Subject: [PATCH] unskip test_unsupported_alignment

---
 tests/test_mem_eff_attention.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/tests/test_mem_eff_attention.py b/tests/test_mem_eff_attention.py
index 5d0ee35233..02c9957db4 100644
--- a/tests/test_mem_eff_attention.py
+++ b/tests/test_mem_eff_attention.py
@@ -1560,9 +1560,6 @@ def test_unsupported_stride_lastdim(op: Type[fmha.AttentionFwOpBase]):
         0, 3, 1, 2
     )
 
-    if skip_reasons := op.not_supported_reasons(fmha.Inputs(q, q, q)):
-        pytest.skip("; ".join(skip_reasons))
-
     try:
         fmha.memory_efficient_attention(q, q, q, op=(op, None))
     except ValueError as e:
@@ -1579,9 +1576,6 @@ def test_unsupported_stride_lastdim(op: Type[fmha.AttentionFwOpBase]):
 def test_unsupported_stride_alignment(op: Type[fmha.AttentionFwOpBase]):
     q = torch.empty([1, 2, 1, 33], device="cuda", dtype=torch.float16)[:, :, :, :32]
 
-    if skip_reasons := op.not_supported_reasons(fmha.Inputs(q, q, q)):
-        pytest.skip("; ".join(skip_reasons))
-
     try:
         fmha.memory_efficient_attention(q, q, q, op=(op, None))
     except ValueError as e:
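
Note (reviewer sketch, not part of the patch to apply): the snippet below shows, in isolation, the guard these hunks remove and the error path that remains, assuming an operator such as fmha.cutlass.FwOp and a CUDA device; the helper name run_alignment_check is hypothetical and the except body is elided in the real tests.

# Illustrative sketch only. Before this change the test skipped itself whenever
# the op reported the misaligned input as unsupported; after it, the
# try/except ValueError path is always exercised.
from typing import Type

import pytest
import torch

from xformers.ops import fmha


def run_alignment_check(op: Type[fmha.AttentionFwOpBase]) -> None:
    # Allocate 33 columns but view only 32, so the last dim is misaligned in memory.
    q = torch.empty([1, 2, 1, 33], device="cuda", dtype=torch.float16)[:, :, :, :32]

    # Removed by this patch: skip instead of testing when the op says "unsupported".
    if skip_reasons := op.not_supported_reasons(fmha.Inputs(q, q, q)):
        pytest.skip("; ".join(skip_reasons))

    # Kept by this patch: the op must either handle the input or raise ValueError.
    try:
        fmha.memory_efficient_attention(q, q, q, op=(op, None))
    except ValueError:
        pass  # the real tests continue with further checks here (elided)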