#!/bin/bash
#
# Install a prebuilt flash-attention wheel (Windows, CUDA build) from the
# kingbri1/flash-attention GitHub releases, fetched through the
# github.moeyy.xyz mirror/proxy.
#
# Edit the version constants below to pick a different wheel; the download
# URL is derived from them, so they stay in sync with what gets installed.

set -euo pipefail

readonly FLASH_ATTN_VERSION=2.7.4.post1   # flash-attention release tag (v${FLASH_ATTN_VERSION})
readonly PYTHON_VERSION=310               # CPython ABI tag: cp310 == Python 3.10
readonly TORCH_VERSION=2.7.0              # PyTorch version the wheel was built against
readonly CUDA_VERSION=128                 # CUDA tag in the wheel name: cu128 == CUDA 12.8
readonly MIRROR_PREFIX=https://github.moeyy.xyz   # proxy prefixed to the raw GitHub URL

# Wheel filename follows the release's naming scheme:
#   flash_attn-<ver>+cu<cuda>torch<torch>cxx11abiFALSE-cp<py>-cp<py>-win_amd64.whl
readonly WHEEL_URL="${MIRROR_PREFIX}/https://github.com/kingbri1/flash-attention/releases/download/v${FLASH_ATTN_VERSION}/flash_attn-${FLASH_ATTN_VERSION}+cu${CUDA_VERSION}torch${TORCH_VERSION}cxx11abiFALSE-cp${PYTHON_VERSION}-cp${PYTHON_VERSION}-win_amd64.whl"

# set -e aside, check explicitly so we can print a useful diagnostic.
if ! pip install "$WHEEL_URL"; then
  printf 'error: failed to install flash-attention wheel from %s\n' "$WHEEL_URL" >&2
  exit 1
fi