# GitHub Actions workflow: nv-flash-attn
# Runs sequence-parallelism (Ulysses) unit tests with FlashAttention on a
# self-hosted NVIDIA A6000 runner; triggered on related PRs and nightly.
name: nv-flash-attn

on:
  workflow_dispatch:
  pull_request:
    paths:
      - 'deepspeed/sequence/**'
      - 'tests/unit/sequence_parallelism/**'
      - '.github/workflows/nv-flash-attn.yml'
  schedule:
    # nightly run at 00:00 UTC
    - cron: "0 0 * * *"

# Cancel an in-flight run for the same ref when a newer one starts.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  unit-tests:
    runs-on: [self-hosted, nvidia, a6000]
    container:
      image: nvcr.io/nvidia/pytorch:24.12-py3
      ports:
        - 80
      options: --gpus all --shm-size "8G"

    steps:
      - uses: actions/checkout@v4

      - name: Check container state
        run: |
          ldd --version
          nvcc --version
          nvidia-smi
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"

      - name: Install deepspeed
        run: |
          python -m pip install .[dev]
          ds_report

      # install transformers after deepspeed so that the right version of transformers is installed
      - name: Install transformers
        run: |
          python -m pip install transformers==4.50.0

      - name: Install FlashAttention
        run: |
          python -m pip install flash-attn

      - name: Python environment
        run: |
          python -m pip list

      - name: Unit tests
        run: |
          unset TORCH_CUDA_ARCH_LIST # only jit compile for current arch
          cd tests
          python -m pytest --color=yes --durations=0 --verbose -rF unit/sequence_parallelism/test_ulysses.py --torch_ver="2.6" --cuda_ver="12"

      # Only file an issue for scheduled (nightly) failures, not PR runs.
      - name: Open GitHub issue if nightly CI fails
        if: ${{ failure() && (github.event_name == 'schedule') }}
        uses: JasonEtco/create-an-issue@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          filename: .github/ISSUE_TEMPLATE/ci_failure_report.md
          update_existing: true