enable packed layouts and all configs #87
Workflow file for this run

name: AMD Perf Kernel Tests

on:
  workflow_dispatch:
  pull_request:
    branches: [main_perf]
  merge_group:
    branches: [main_perf]
    types: [checks_requested]
  push:
    branches: [main_perf]

concurrency:
  group: ${{ github.ref }}
  cancel-in-progress: true

permissions: read-all

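# Two jobs: Runner-Preparation-AMD selects the runner matrix (self-hosted ROCm runners
# when running in ROCm/flash-attention, plain ubuntu-latest otherwise), and
# Integration-Tests-AMD builds the package and runs the tests on those runners.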
jobs:
  Runner-Preparation-AMD:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    outputs:
      matrix-HIP: ${{ steps.set-matrix.outputs.matrix-HIP }}
    steps:
      - name: Prepare runner matrix
        id: set-matrix
        run: |
          if [ x"${{ github.repository }}" == x"ROCm/flash-attention" ]; then
            echo '::set-output name=matrix-HIP::[["self-hosted", "rocm"]]'
          else
            echo '::set-output name=matrix-HIP::[["ubuntu-latest"]]'
          fi
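          # NOTE: the `::set-output` workflow command is deprecated on current GitHub
          # runners; the modern equivalent would be
          # echo 'matrix-HIP=[["self-hosted", "rocm"]]' >> "$GITHUB_OUTPUT"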

  Integration-Tests-AMD:
    needs: Runner-Preparation-AMD
    if: needs.Runner-Preparation-AMD.outputs.matrix-HIP != ''
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: ${{fromJson(needs.Runner-Preparation-AMD.outputs.matrix-HIP)}}
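    # Run inside AMD's ROCm PyTorch image; --device=/dev/kfd and --device=/dev/dri
    # expose the AMD GPU to the container, and --group-add video grants the group
    # membership needed to access those devices.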
    container:
      image: rocm/pytorch:rocm6.0.2_ubuntu22.04_py3.10_pytorch_2.1.2
      options: --device=/dev/kfd --device=/dev/dri --security-opt seccomp=unconfined --group-add video --user root
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install Triton
        run: |
          pip uninstall -y triton
          pip install matplotlib pandas pytest
          git clone https://github.com/triton-lang/triton
          cd triton
          git checkout 2e9f2c2d20601c24b91a4c32a7b97ad1f8a55d88
          pip install --verbose -e python
          cd ..
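      # Triton is built from a pinned commit rather than a release, presumably so the
      # kernels are exercised against a known Triton revision; flash-attention itself
      # is then built on top of it in the next step.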
      - name: Build
        run: |
          python setup.py install
      - name: Flash Attention Tests
        run: |
          pytest tests/test_flash_attn.py
      # - name: Flash Attention qkvpacked Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_qkvpacked
      #     pytest tests/test_flash_attn.py::test_flash_attn_varlen_qkvpacked
      # - name: Flash Attention output Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_output
      #     pytest tests/test_flash_attn.py::test_flash_attn_varlen_output
      # - name: Flash Attention causal Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_causal
      #     pytest tests/test_flash_attn.py::test_flash_attn_varlen_causal
      # - name: Flash Attention kvcache Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_kvcache
      #     pytest tests/test_flash_attn.py::test_flash_attn_splitkv
      # - name: Flash Attention race condition Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_race_condition
      # - name: Flash Attention bwd Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_bwd_overflow
      #     pytest tests/test_flash_attn.py::test_flash_attn_bwd_transpose
      #     pytest tests/test_flash_attn.py::test_flash_attn_bwd_varlen_overflow
      # - name: Flash Attention deterministic Tests
      #   run: |
      #     pytest tests/test_flash_attn.py::test_flash_attn_deterministic
      #     pytest tests/test_flash_attn.py::test_flash_attn_varlen_deterministic
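      # The commented-out steps above split tests/test_flash_attn.py into per-feature
      # subsets (qkvpacked, output, causal, kvcache, race condition, bwd, deterministic),
      # presumably kept so individual groups can be re-enabled while debugging.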
      - name: AMD Kernel Tests
        run: |
          pytest -v -s flash_attn/flash_attn_triton_kernel_decode_amd.py::test_op_fwd
          pytest -v -s flash_attn/flash_attn_triton_kernel_prefill_amd.py
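
For reference, a minimal sketch of reproducing the AMD kernel test step locally, assuming a ROCm-capable machine with Docker and a checkout of this repository in the current directory; the image, container options, Triton commit, and test commands come from the workflow above, while the /workspace mount path is arbitrary:

  docker run -it --device=/dev/kfd --device=/dev/dri --security-opt seccomp=unconfined \
    --group-add video --user root -v "$PWD":/workspace -w /workspace \
    rocm/pytorch:rocm6.0.2_ubuntu22.04_py3.10_pytorch_2.1.2 bash
  # inside the container, mirror the Install Triton / Build / test steps:
  pip uninstall -y triton && pip install matplotlib pandas pytest
  git clone https://github.com/triton-lang/triton && cd triton
  git checkout 2e9f2c2d20601c24b91a4c32a7b97ad1f8a55d88
  pip install --verbose -e python && cd ..
  python setup.py install
  pytest -v -s flash_attn/flash_attn_triton_kernel_decode_amd.py::test_op_fwd
  pytest -v -s flash_attn/flash_attn_triton_kernel_prefill_amd.py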