
Commit ebb3bd6

save

micmelesse committed Aug 9, 2024
1 parent 8d2a641
Showing 2 changed files with 19 additions and 7 deletions.
18 changes: 15 additions & 3 deletions .github/workflows/amd_tests.yml
@@ -57,20 +57,32 @@ jobs:
       - name: Build
         run: |
           python setup.py install
-      - name: Flash Attention Tests
+      # - name: Flash Attention qkvpacked Tests
+      #   run: |
+      #     pytest tests/test_flash_attn.py::test_flash_attn_qkvpacked
+      #     pytest tests/test_flash_attn.py::test_flash_attn_varlen_qkvpacked
+      - name: Flash Attention output Tests
         run: |
-          pytest tests/test_flash_attn.py::test_flash_attn_qkvpacked
-          pytest tests/test_flash_attn.py::test_flash_attn_varlen_qkvpacked
           pytest tests/test_flash_attn.py::test_flash_attn_output
           pytest tests/test_flash_attn.py::test_flash_attn_varlen_output
+      - name: Flash Attention causal Tests
+        run: |
           pytest tests/test_flash_attn.py::test_flash_attn_causal
           pytest tests/test_flash_attn.py::test_flash_attn_varlen_causal
+      - name: Flash Attention kvcache Tests
+        run: |
           pytest tests/test_flash_attn.py::test_flash_attn_splitkv
           pytest tests/test_flash_attn.py::test_flash_attn_kvcache
+      - name: Flash Attention race condition Tests
+        run: |
           pytest tests/test_flash_attn.py::test_flash_attn_race_condition
+      - name: Flash Attention bwd Tests
+        run: |
           pytest tests/test_flash_attn.py::test_flash_attn_bwd_overflow
           pytest tests/test_flash_attn.py::test_flash_attn_bwd_transpose
           pytest tests/test_flash_attn.py::test_flash_attn_bwd_varlen_overflow
+      - name: Flash Attention deterministic Tests
+        run: |
           pytest tests/test_flash_attn.py::test_flash_attn_deterministic
           pytest tests/test_flash_attn.py::test_flash_attn_varlen_deterministic
       - name: AMD Kernel Tests
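The workflow change above splits one monolithic test step into per-group steps, so a failing group is identifiable directly from the job summary. To reproduce a single group locally, the same test node IDs can be passed to pytest. A minimal sketch, assuming pytest is installed and the package was built as in the Build step:

# Run the "Flash Attention output Tests" group locally, mirroring the
# workflow step above; pytest.main returns the usual pytest exit code.
import sys

import pytest

exit_code = pytest.main([
    "tests/test_flash_attn.py::test_flash_attn_output",
    "tests/test_flash_attn.py::test_flash_attn_varlen_output",
])
sys.exit(exit_code)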
8 changes: 4 additions & 4 deletions tests/test_flash_attn.py
@@ -626,8 +626,8 @@ def test_flash_attn_qkvpacked(seqlen, d, dropout_p, causal, local, alibi, determ
         pytest.skip("local sliding window attention not supported on AMD yet")
 
     # skip all cases where seqlen_q, seqlen_k, or d are not powers of 2
-    if not (is_power_of_2(seqlen) and is_power_of_2(d)):
-        pytest.skip("seqlen_q, seqlen_k, or d are not powers of 2")
+    # if not (is_power_of_2(seqlen) and is_power_of_2(d)):
+    #     pytest.skip("seqlen_q, seqlen_k, or d are not powers of 2")
 
     if test_backward == True:
         pytest.skip("Backward Attention not supported on AMD yet")
@@ -791,8 +791,8 @@ def test_flash_attn_varlen_qkvpacked(
         pytest.skip("local sliding window attention not supported on AMD yet")
 
     # skip all cases where seqlen_q, seqlen_k, or d are not powers of 2
-    if not (is_power_of_2(seqlen) and is_power_of_2(d)):
-        pytest.skip("seqlen_q, seqlen_k, or d are not powers of 2")
+    # if not (is_power_of_2(seqlen) and is_power_of_2(d)):
+    #     pytest.skip("seqlen_q, seqlen_k, or d are not powers of 2")
 
     if test_backward == True:
         pytest.skip("Backward Attention not supported on AMD yet")
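Both hunks comment out the same guard, which previously restricted these tests to power-of-2 sequence lengths and head dimensions via an is_power_of_2 helper, so non-power-of-2 shapes now run on AMD as well. The helper's definition is not part of this diff; a minimal sketch of such a predicate (the repository's actual implementation may differ) is:

def is_power_of_2(n: int) -> bool:
    # A positive integer is a power of two iff exactly one bit is set;
    # n & (n - 1) clears the lowest set bit, leaving 0 in that case.
    return n > 0 and (n & (n - 1)) == 0

assert is_power_of_2(64) and not is_power_of_2(96)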
