[misc] feat: support different flash_attn versions with variable num returns #21
Workflow file for this run
name: model_rmpad

on:
  # Trigger the workflow on push or pull request,
  # but only for the main branch
  push:
    branches:
      - main
    paths:
      - "**/*.py"
      - .github/workflows/model.yml
  pull_request:
    branches:
      - main
    paths:
      - "**/*.py"
      - .github/workflows/model.yml

jobs:
  e2e_gpu:
    runs-on: [self-hosted, l20-1]
    env:
      HTTP_PROXY: ${{ secrets.PROXY_HTTP }}
      HTTPS_PROXY: ${{ secrets.PROXY_HTTPS }}
      NO_PROXY: "localhost,127.0.0.1"
    container:
      image: verlai/verl:vemlp-th2.4.0-cu124-vllm0.6.3-ray2.10-te1.7-v0.0.3
      options: --gpus all --shm-size=10g
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
      - name: Install the current repository and upgrade to latest transformers/flash_attn
        run: |
          pip3 install -e .[test]
          pip3 install --upgrade transformers
      - name: Running digit completion e2e training tests on 8 L20 GPUs + flash_attn 2.5.8
        run: |
          pytest -s tests/model/test_transformer.py
      - name: Running digit completion e2e training tests on 8 L20 GPUs + latest flash_attn
        run: |
          pip3 install --upgrade flash_attn --no-build-isolation
          pytest -s tests/model/test_transformer.py
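The two pytest invocations run the same test file twice: once against the flash_attn 2.5.8 build shipped in the container image, and once after upgrading to the latest flash_attn release. The reason for the double run is that flash_attn's low-level forward functions return a different number of values across versions, so code that unpacks a fixed-size tuple breaks when the installed version changes. Below is a minimal compatibility sketch of version-tolerant unpacking; it is not the PR's actual code, and the helper name and index assumptions are illustrative.

```python
# Sketch only: normalize the return value of flash_attn's low-level varlen
# forward, whose tuple length differs across flash_attn releases.
from flash_attn.flash_attn_interface import _flash_attn_varlen_forward


def flash_attn_varlen_forward_compat(*args, **kwargs):
    """Hypothetical helper: call the varlen forward and return (out, softmax_lse)
    regardless of how many values the installed flash_attn version yields."""
    outputs = _flash_attn_varlen_forward(*args, **kwargs)
    if not isinstance(outputs, tuple):
        # Defensive fallback in case a wrapper returns only the output tensor.
        return outputs, None
    out = outputs[0]  # the attention output is first in every known layout
    # Assumption: older releases (e.g. 2.5.x) additionally return padded copies of
    # q/k/v in the middle of the tuple, while both old and new layouts end with
    # (softmax_lse, S_dmask, rng_state), so indexing from the tail works for both.
    softmax_lse = outputs[-3] if len(outputs) >= 4 else None
    return out, softmax_lse
```

Testing against both the pinned and the latest flash_attn in CI is what catches this kind of tuple-shape drift early, instead of only surfacing it when users upgrade locally.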