Skip to content

Commit

Permalink
min ci changes
Browse files Browse the repository at this point in the history
  • Loading branch information
micmelesse committed Feb 12, 2025
1 parent 51862e0 commit 0fd32e4
Showing 1 changed file with 22 additions and 22 deletions.
44 changes: 22 additions & 22 deletions .github/workflows/amd_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -50,33 +50,33 @@ jobs:
python setup.py install
# CDNA Tests
# - name: Flash Attention Tests Using Reference Impl
# if: matrix.runner == 'linux-mi300-gpu-1'
# run: |
# export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
# export FLASH_ATTENTION_TRITON_AMD_REF=1
# pytest tests/test_flash_attn_triton_amd.py
# - name: Flash Attention CDNA Tests
# if: matrix.runner == 'linux-mi300-gpu-1'
# run: |
# export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
# pytest tests/test_flash_attn_triton_amd.py
# Run the flash-attn test suite on the MI300 (CDNA) runner only.
# NOTE(review): FLASH_ATTENTION_TRITON_AMD_REF=1 presumably switches the
# backend to a reference implementation — confirm against the package docs.
- name: Flash Attention Tests Using Reference Impl
if: matrix.runner == 'linux-mi300-gpu-1'
run: |
export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
export FLASH_ATTENTION_TRITON_AMD_REF=1
pytest tests/test_flash_attn_triton_amd.py
# Same test file as the reference-impl step, but without
# FLASH_ATTENTION_TRITON_AMD_REF — i.e. the default (non-reference) path.
# Gated to the MI300 (CDNA) runner.
- name: Flash Attention CDNA Tests
if: matrix.runner == 'linux-mi300-gpu-1'
run: |
export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
pytest tests/test_flash_attn_triton_amd.py
# Run only the two fp8 prefill tests (fixed-length and varlen) from the
# in-tree AMD test module, with verbose output (-v) and stdout shown (-s).
# MI300-only.
- name: AMD Tests
if: matrix.runner == 'linux-mi300-gpu-1'
run: |
export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
pytest -v -s flash_attn/flash_attn_triton_amd/test.py::test_op_prefill_fp8 flash_attn/flash_attn_triton_amd/test.py::test_op_prefill_varlen_fp8
# - name: AMD Bench
# if: matrix.runner == 'linux-mi300-gpu-1'
# run: |
# export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
# python flash_attn/flash_attn_triton_amd/bench.py
# - name: AMD Bench with Autotune
# if: matrix.runner == 'linux-mi300-gpu-1'
# run: |
# export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
# export FLASH_ATTENTION_TRITON_AMD_AUTOTUNE=1
# python flash_attn/flash_attn_triton_amd/bench.py
# Run the AMD benchmark script on the MI300 runner.
# NOTE(review): this appears to run benchmarks inside CI without a timing
# budget or result check — confirm it is intended as a smoke test.
- name: AMD Bench
if: matrix.runner == 'linux-mi300-gpu-1'
run: |
export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
python flash_attn/flash_attn_triton_amd/bench.py
# Same benchmark script as the plain bench step, additionally setting
# FLASH_ATTENTION_TRITON_AMD_AUTOTUNE=1 — presumably enabling Triton kernel
# autotuning; verify against the backend's env-var handling. MI300-only.
- name: AMD Bench with Autotune
if: matrix.runner == 'linux-mi300-gpu-1'
run: |
export FLASH_ATTENTION_TRITON_AMD_ENABLE="TRUE"
export FLASH_ATTENTION_TRITON_AMD_AUTOTUNE=1
python flash_attn/flash_attn_triton_amd/bench.py
# RDNA Tests
- name: Flash Attention RDNA Tests
Expand Down

0 comments on commit 0fd32e4

Please sign in to comment.