Skip to content

Commit e2e4333

Browse files
committed
Limit to MAX_JOBS=1 with CUDA 12.2
1 parent ce73503 commit e2e4333

File tree

3 files changed: +5 −4 lines changed

.github/workflows/publish.yml

+2-1
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,8 @@ jobs:
168168
export PATH=/usr/local/nvidia/bin:/usr/local/nvidia/lib64:$PATH
169169
export LD_LIBRARY_PATH=/usr/local/nvidia/lib64:/usr/local/cuda/lib64:$LD_LIBRARY_PATH
170170
# Limit MAX_JOBS otherwise the github runner goes OOM
171-
MAX_JOBS=2 FLASH_ATTENTION_FORCE_BUILD="TRUE" FLASH_ATTENTION_FORCE_CXX11_ABI=${{ matrix.cxx11_abi}} python setup.py bdist_wheel --dist-dir=dist
171+
# CUDA 11.8 can compile with 2 jobs, but CUDA 12.2 goes OOM
172+
MAX_JOBS=$([ "$MATRIX_CUDA_VERSION" == "122" ] && echo 1 || echo 2) FLASH_ATTENTION_FORCE_BUILD="TRUE" FLASH_ATTENTION_FORCE_CXX11_ABI=${{ matrix.cxx11_abi}} python setup.py bdist_wheel --dist-dir=dist
172173
tmpname=cu${MATRIX_CUDA_VERSION}torch${MATRIX_TORCH_VERSION}cxx11abi${{ matrix.cxx11_abi }}
173174
wheel_name=$(ls dist/*whl | xargs -n 1 basename | sed "s/-/+$tmpname-/2")
174175
ls dist/*whl |xargs -I {} mv {} dist/${wheel_name}

flash_attn/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "2.5.9"
1+
__version__ = "2.5.9.post1"
22

33
from flash_attn.flash_attn_interface import (
44
flash_attn_func,

training/Dockerfile

+2-2
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ RUN pip install transformers==4.25.1 datasets==2.8.0 pytorch-lightning==1.8.6 tr
8585
RUN pip install git+https://github.com/mlcommons/logging.git@2.1.0
8686

8787
# Install FlashAttention
88-
RUN pip install flash-attn==2.5.9
88+
RUN pip install flash-attn==2.5.9.post1
8989

9090
# Install CUDA extensions for fused dense
91-
RUN pip install git+https://github.com/HazyResearch/flash-attention@v2.5.9#subdirectory=csrc/fused_dense_lib
91+
RUN pip install git+https://github.com/HazyResearch/flash-attention@v2.5.9.post1#subdirectory=csrc/fused_dense_lib

0 commit comments

Comments (0)