@@ -58,7 +58,7 @@ SHELL ["/bin/bash", "-c"]
 ENV PATH=/root/.local/bin:$PATH
 
 RUN uv python pin ${PY_VERSION}
-RUN uv pip install --no-cache --system ipython tqdm rich jupyter jupyterlab ipykernel pandas einops safetensors pyyaml requests psutil opencv-python-headless matplotlib seaborn scikit-learn scipy pillow tensorboard h5py
+RUN uv pip install --no-cache --system ipython tqdm rich jupyter jupyterlab ipykernel pandas einops safetensors pyyaml requests psutil opencv-python-headless matplotlib seaborn scikit-learn scipy pillow tensorboard h5py triton
 
 # torch installer
 COPY <<EOF /usr/local/bin/install_torch_env
@@ -67,28 +67,31 @@ set -e
 TORCH_VER=\$1
 VISION_VER=\$2
 CUDA_VER=\$3
+FLASH_ATTN_LINK=\$4
+
 BASE_DIR=/ddiff-base/py\${PY_VERSION}-torch\${TORCH_VER}
 mkdir -p "\$BASE_DIR" && cd "\$BASE_DIR"
 uv venv --python "\${PY_VERSION}" --system-site-packages --seed
 uv pip install --no-cache torch=="\${TORCH_VER}" torchvision=="\${VISION_VER}" torchaudio=="\${TORCH_VER}" --index-url "https://download.pytorch.org/whl/\${CUDA_VER}"
-echo "ln -sfn \${BASE_DIR}/.venv ./.venv && uv add torch==\${TORCH_VER} torchvision==\${VISION_VER} torchaudio==\${TORCH_VER} && echo Created uv virtual environment with torch==\${TORCH_VER}" > /usr/local/bin/uv_init_torch\${TORCH_VER}
+uv pip install --no-cache \$FLASH_ATTN_LINK
+echo "ln -sfn \${BASE_DIR}/.venv ./.venv && [ -f pyproject.toml ] && uv add torch==\${TORCH_VER} torchvision==\${VISION_VER} torchaudio==\${TORCH_VER}; echo Created uv virtual environment with torch==\${TORCH_VER}" > /usr/local/bin/uv_init_torch\${TORCH_VER}
 chmod +x /usr/local/bin/uv_init_torch\${TORCH_VER}
 EOF
 RUN chmod +x /usr/local/bin/install_torch_env
 
 ENV UV_NO_CACHE=1
 
 # pytorch 2.4.1 (251209 Update)
-RUN install_torch_env 2.4.1 0.19.1 cu124
+RUN install_torch_env 2.4.1 0.19.1 cu124 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.3.12/flash_attn-2.8.0+cu124torch2.4-cp310-cp310-linux_x86_64.whl
 
 # pytorch 2.5.1 (251209 Update)
-RUN install_torch_env 2.5.1 0.20.1 cu124
+RUN install_torch_env 2.5.1 0.20.1 cu124 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.4.11/flash_attn-2.8.3+cu124torch2.5-cp310-cp310-linux_x86_64.whl
 
 # pytorch 2.6.0 (251209 Update)
-RUN install_torch_env 2.6.0 0.21.0 cu124
+RUN install_torch_env 2.6.0 0.21.0 cu124 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.4.11/flash_attn-2.8.3+cu124torch2.6-cp310-cp310-linux_x86_64.whl
 
 # pytorch 2.7.1 (251209 Update)
-RUN install_torch_env 2.7.1 0.22.1 cu126
+RUN install_torch_env 2.7.1 0.22.1 cu126 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.4.11/flash_attn-2.8.3+cu126torch2.7-cp310-cp310-linux_x86_64.whl
 
 # pytorch 2.9.0 (251209 Update)
-RUN install_torch_env 2.9.0 0.24.0 cu126
+RUN install_torch_env 2.9.0 0.24.0 cu126 https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.4.17/flash_attn-2.6.3+cu126torch2.9-cp310-cp310-linux_x86_64.whl