#!/bin/bash
#
# Provision the conda environment llama_240718_cuda12_2 (Python 3.11,
# CUDA 12.x) with the PyTorch 2.4 stack, flash-attention, vLLM,
# deepspeed, and this project's own dependencies.
#
# Requirements:
#   - conda available on PATH
#   - pre-downloaded wheels under ${WHEEL_DIR}
#   - network access to the SJTU / USTC PyPI mirrors
#   - run from the project root (the directory containing requirements.txt)

set -euo pipefail   # abort on any failed step so we never install into the wrong env

readonly BASE_DIR=/data/nfs-ten1/nfs/meichaoyang001
readonly ENV_PREFIX="${BASE_DIR}/envs/llama_240718_cuda12_2"
readonly WHEEL_DIR="${BASE_DIR}/pypi"
readonly MIRROR_SJTU=https://mirror.sjtu.edu.cn/pypi/web/simple/
readonly MIRROR_USTC=https://pypi.mirrors.ustc.edu.cn/simple/
PROJECT_DIR=$(pwd)  # remember the project root so every cd can return explicitly
readonly PROJECT_DIR

# Use the shared channel/mirror configuration for the conda calls below.
cp "${BASE_DIR}/envs/.condarc" /root/.condarc

# Make `conda activate` usable inside a non-interactive script.
eval "$(conda shell.bash hook)"

conda create --prefix="${ENV_PREFIX}" python=3.11 -y
conda activate "${ENV_PREFIX}"

# --- PyTorch stack from pre-downloaded wheels (cu121, cp311) ----------------
cd "${WHEEL_DIR}"
pip install \
  torch-2.4.0+cu121-cp311-cp311-linux_x86_64.whl \
  torchvision-0.19.0+cu121-cp311-cp311-linux_x86_64.whl \
  torchaudio-2.4.0+cu121-cp311-cp311-linux_x86_64.whl \
  -i "${MIRROR_SJTU}"
cd "${PROJECT_DIR}"

# Project requirements (resolved from the project root).
pip install -r requirements.txt -i "${MIRROR_SJTU}"

# --- flash-attention wheel, cupy, vLLM, xformers ----------------------------
cd "${WHEEL_DIR}"
# Wheel built from ${BASE_DIR}/project/flash-attention
pip install flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp311-cp311-linux_x86_64.whl -i "${MIRROR_SJTU}"
pip install cupy-cuda12x -i "${MIRROR_SJTU}"
pip install vllm==0.5.4 -i "${MIRROR_SJTU}"
pip install xformers -i "${MIRROR_USTC}"
cd "${PROJECT_DIR}"

# --- optional CUDA extensions from the flash-attention source tree ----------
# Clone first if missing:
#   git clone https://gitee.com/meichaoyang/flash-attention.git ../flash-attention
cd ../flash-attention
pip install csrc/layer_norm
pip install csrc/rotary
# pip install flash-attn --no-build-isolation
# NOTE(review): the original script did NOT return here, so the editable
# install below ran inside ../flash-attention; the [qwen,...] extras belong
# to the project, so we return to the project root first.
cd "${PROJECT_DIR}"

pip install deepspeed==0.14.0 -i "${MIRROR_USTC}"
# Quote the extras spec so the shell cannot glob-expand ".[...]".
pip install -e ".[qwen,modelscope,accelerate,bitsandbytes]" -i "${MIRROR_USTC}"