# Hanrui / syxin / Specforge / scripts / setup_env.sh
# Uploaded by Lekr0 with the upload-large-folder tool (commit 2d67aa6, verified)
# NOTE(review): the lines above are web-page scrape residue, so the shebang
# below is not on line 1; run this script explicitly as `bash setup_env.sh`.
#!/bin/bash
# Set up the Specforge development environment:
# creates a conda env, installs PyTorch + pinned dependencies from the
# TUNA PyPI mirror, installs the repo in editable mode, and smoke-tests imports.
set -euo pipefail

# Constants: target env name, Tsinghua PyPI mirror URL, absolute conda binary.
readonly ENV_NAME=spec
readonly TUNA=https://pypi.tuna.tsinghua.edu.cn/simple
readonly CONDA=/workspace/miniconda3/bin/conda
echo "=== 1/6 创建 conda 环境 ==="
# Create the Python 3.11 environment non-interactively, then source conda's
# profile hook so `conda activate` works inside this non-login shell.
"$CONDA" create -n "$ENV_NAME" python=3.11 -y
source /workspace/miniconda3/etc/profile.d/conda.sh
conda activate "$ENV_NAME"
echo "=== 2/6 安装 PyTorch ==="
# Upgrade packaging tooling first, then install the pinned PyTorch stack
# (CUDA 12.8 wheels) straight from the official PyTorch wheel index.
pip install -U pip setuptools wheel
pip install \
  torch==2.9.1 \
  torchvision==0.24.1 \
  torchaudio==2.9.1 \
  --index-url https://download.pytorch.org/whl/cu128
echo "=== 3/6 安装核心依赖 ==="
# Core training/serving dependencies, pulled from the TUNA mirror.
# transformers and sglang are pinned; the rest float to latest compatible.
core_pkgs=(
  transformers==4.57.1
  accelerate
  datasets
  tqdm
  peft
  safetensors
  pydantic
  numpy
  typing_extensions
  sglang==0.5.6
  sgl-kernel
  yunchang
)
pip install -i "$TUNA" "${core_pkgs[@]}"
echo "=== 4/6 安装工具包 ==="
# Build/runtime utilities (ninja is needed by JIT-compiled extensions).
pip install -i "$TUNA" packaging ninja psutil
echo "=== 5/6 安装可选依赖 ==="
# flash-attn compiles CUDA extensions and may fail without a matching local
# toolchain; that is tolerated because the code can fall back to
# flex_attention (verified in step 6). The warning now goes to stderr,
# where diagnostics belong.
pip install flash-attn --no-build-isolation -i "$TUNA" \
  || echo "WARNING: flash-attn 安装失败,将 fallback 到 flex_attention" >&2
# wandb is optional telemetry only — intentionally ignore install failure.
pip install wandb -i "$TUNA" || true
echo "=== 6/6 安装 Specforge ==="
# Install the repo itself in editable mode. --no-deps prevents pip from
# re-resolving (and possibly downgrading) the pinned stack installed above.
# Under `set -e` a failed cd would exit silently; emit an explicit error.
cd /workspace/hanrui/syxin_old/Specforge \
  || { echo "ERROR: cannot cd to /workspace/hanrui/syxin_old/Specforge" >&2; exit 1; }
pip install -e . --no-deps
echo ""
echo "=== 验证 ==="
# Smoke-test the environment: import each installed package and print its
# version/status. Any failing import makes python exit non-zero, which
# aborts the script via `set -e`. flex_attention is imported explicitly
# because it is the fallback attention path when flash-attn is missing.
python -c "
import torch
print(f'PyTorch: {torch.__version__}')
print(f'CUDA: {torch.cuda.is_available()}, {torch.version.cuda}')
from torch.nn.attention.flex_attention import flex_attention
print('flex_attention: OK')
import transformers; print(f'transformers: {transformers.__version__}')
import accelerate; print(f'accelerate: {accelerate.__version__}')
import datasets; print('datasets: OK')
import peft; print(f'peft: {peft.__version__}')
import yunchang; print('yunchang: OK')
import sglang; print('sglang: OK')
import safetensors; print('safetensors: OK')
import pydantic; print('pydantic: OK')
print()
print('All good!')
"