use prebuilt wheels for flash-attn and deepspeed
scripts/setup-runpod.sh (+8 -0)
@@ -26,6 +26,14 @@ if [ -z "${TORCH_CUDA_ARCH_LIST}" ]; then # only set this if not set yet
 export TORCH_CUDA_ARCH_LIST="7.0 7.5 8.0 8.6+PTX"
 fi
 
+# install flash-attn and deepspeed from pre-built wheels for this specific container b/c these take forever to install
+mkdir -p /workspace/wheels
+cd /workspace/wheels
+curl -L -O https://github.com/winglian/axolotl/raw/wheels/wheels/deepspeed-0.9.2%2B7ddc3b01-cp38-cp38-linux_x86_64.whl
+curl -L -O https://github.com/winglian/axolotl/raw/wheels/wheels/flash_attn-1.0.4-cp38-cp38-linux_x86_64.whl
+pip install deepspeed-0.9.2%2B7ddc3b01-cp38-cp38-linux_x86_64.whl
+pip install flash_attn-1.0.4-cp38-cp38-linux_x86_64.whl
+
 cd /workspace/
 git clone https://github.com/winglian/axolotl.git
 cd axolotl
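A quick sanity check after this step (not part of the commit, just a suggested follow-up) is to confirm the prebuilt wheels import cleanly before continuing with the axolotl install:

# Hypothetical verification, assuming the container's default python is the interpreter pip installed into
python -c "import deepspeed; print('deepspeed', deepspeed.__version__)"
python -c "import flash_attn; print('flash_attn imported ok')"

Equivalently, pip can be pointed directly at the wheel URLs, but downloading with curl first leaves a copy of each wheel in /workspace/wheels for reuse if the environment has to be rebuilt.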