# Install flash-attention v2.3.2 from a pre-packaged mirror tarball
# (faster/more reliable than cloning GitHub from this network).
# Upstream equivalent:
# git clone -b v2.3.2 --recursive https://github.com/Dao-AILab/flash-attention && \

# Abort on any error, unset variable, or failed pipeline stage —
# otherwise a failed download pipes an HTTP error page into tar,
# and a failed build still "succeeds" silently.
set -euo pipefail

# Cap parallel compile jobs so the CUDA build doesn't exhaust memory.
export MAX_JOBS=8

# -f: fail on HTTP errors instead of saving the error body
# -sS: quiet progress bar but still print real errors
# -L: follow redirects
curl -fsSL "https://cr-images-pub.oss-cn-hangzhou.aliyuncs.com/root/modelscope/flash-attention.tar.gz" | tar -xz

# Build + install, then remove the source tree to keep the image/workdir small.
cd flash-attention
python setup.py install
cd ..
rm -rf -- flash-attention
