#!/usr/bin/env python3
"""
下载insightface模型文件到本地
避免每次启动时从GitHub下载
"""
import os
import sys
import requests
import zipfile
from pathlib import Path
from tqdm import tqdm

# Project root: this script lives in <root>/scripts/, so go up two levels.
PROJECT_ROOT = Path(__file__).parent.parent
# Destination directory where model archives are downloaded and extracted.
MODEL_ROOT = PROJECT_ROOT / "models" / "insightface"

# Model catalog: GitHub release URL, approximate download size, and a
# human-readable description for each supported InsightFace model pack.
MODELS = {
    "buffalo_l": {
        "url": "https://github.com/deepinsight/insightface/releases/download/v0.7/buffalo_l.zip",
        "size": "约 350MB",
        "description": "Buffalo_L - 高精度模型（推荐）"
    },
    "buffalo_s": {
        "url": "https://github.com/deepinsight/insightface/releases/download/v0.7/buffalo_s.zip",
        "size": "约 100MB",
        "description": "Buffalo_S - 轻量级模型"
    },
    "buffalo_sc": {
        "url": "https://github.com/deepinsight/insightface/releases/download/v0.7/buffalo_sc.zip",
        "size": "约 150MB",
        "description": "Buffalo_SC - 标准模型"
    }
}


def download_file(url: str, dest: Path, desc: str = "下载中") -> None:
    """Stream *url* to the local file *dest* with a tqdm progress bar.

    Args:
        url: HTTP(S) URL to fetch.
        dest: Local file path to write (overwritten if it exists).
        desc: Label shown on the progress bar.

    Raises:
        requests.HTTPError: on a non-2xx HTTP status.
        requests.RequestException: on connection or timeout failures.
    """
    # Use the response as a context manager so the underlying HTTP
    # connection is released even if writing to disk raises part-way
    # through (the original leaked the connection in that case).
    with requests.get(url, stream=True, timeout=30) as response:
        response.raise_for_status()

        # Servers that omit Content-Length yield total=0; tqdm then shows
        # a byte counter without a percentage, which is acceptable.
        total_size = int(response.headers.get('content-length', 0))

        with open(dest, 'wb') as f, tqdm(
            desc=desc,
            total=total_size,
            unit='B',
            unit_scale=True,
            unit_divisor=1024,
        ) as pbar:
            for chunk in response.iter_content(chunk_size=8192):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
                    pbar.update(len(chunk))


def extract_zip(zip_path: Path, dest_dir: Path) -> None:
    """Unpack the archive at *zip_path* into the directory *dest_dir*.

    Prints progress messages; raises zipfile.BadZipFile on a corrupt archive.
    """
    print(f"正在解压到 {dest_dir}...")
    with zipfile.ZipFile(zip_path, 'r') as archive:
        archive.extractall(dest_dir)
    print("✅ 解压完成")


def download_model(model_name: str) -> bool:
    """Download and extract the named InsightFace model pack.

    Prompts the user before re-downloading a model that already exists
    locally.

    Args:
        model_name: Key into MODELS (e.g. "buffalo_l").

    Returns:
        True on success (or when the user chooses to keep an existing
        download), False on an unknown model name or a failed
        download/extract.
    """
    if model_name not in MODELS:
        print(f"❌ 错误: 未知模型 '{model_name}'")
        print(f"可用模型: {', '.join(MODELS.keys())}")
        return False

    model_info = MODELS[model_name]
    model_dir = MODEL_ROOT / "models" / model_name

    # Ask before clobbering an already-downloaded model.
    if model_dir.exists():
        print(f"⚠️  模型 '{model_name}' 已存在: {model_dir}")
        response = input("是否重新下载？(y/N): ")
        if response.lower() != 'y':
            print("跳过下载")
            return True

    print(f"\n📦 准备下载: {model_info['description']}")
    print(f"📊 大小: {model_info['size']}")
    print(f"🔗 URL: {model_info['url']}")

    # The archive is staged next to the extraction root and removed below.
    MODEL_ROOT.mkdir(parents=True, exist_ok=True)
    zip_path = MODEL_ROOT / f"{model_name}.zip"

    try:
        print("\n⬇️  开始下载...")
        download_file(model_info['url'], zip_path, desc=f"下载 {model_name}")

        extract_zip(zip_path, MODEL_ROOT / "models")

        print(f"\n✅ 模型下载完成！")
        print(f"📂 模型位置: {model_dir}")
        return True

    except Exception as e:
        print(f"\n❌ 下载失败: {e}")
        return False

    finally:
        # Cleanup in `finally` so a partial zip is also removed on
        # KeyboardInterrupt / SystemExit, which the broad
        # `except Exception` above does not catch.
        if zip_path.exists():
            print(f"🗑️  删除临时文件: {zip_path}")
            zip_path.unlink()


def list_models() -> None:
    """Print every entry in MODELS together with its local download status."""
    print("\n📋 可用模型列表:")
    print("-" * 80)
    models_dir = MODEL_ROOT / "models"
    for name, info in MODELS.items():
        is_downloaded = (models_dir / name).exists()
        status = "✅ 已下载" if is_downloaded else "⬜ 未下载"
        print(f"{status} {name:15s} - {info['description']:30s} ({info['size']})")
    print("-" * 80)


def check_local_models() -> None:
    """Report which model subdirectories already exist under MODEL_ROOT."""
    models_dir = MODEL_ROOT / "models"
    if not models_dir.exists():
        print(f"📂 模型目录不存在: {models_dir}")
        return

    print(f"\n📂 本地模型目录: {models_dir}")
    # A model counts as downloaded if its named subdirectory exists.
    downloaded = [entry.name for entry in models_dir.iterdir() if entry.is_dir()]

    if downloaded:
        print(f"✅ 已下载的模型: {', '.join(downloaded)}")
    else:
        print("⚠️  还没有下载任何模型")


def main() -> None:
    """Entry point: show local status, then download a model chosen via
    argv or an interactive prompt. Exits 0 on success, 1 on failure."""
    banner = "=" * 80
    print(banner)
    print("InsightFace 模型下载工具")
    print(banner)

    # Show what is already on disk before offering downloads.
    check_local_models()
    list_models()

    if len(sys.argv) > 1:
        # Non-interactive: model name supplied on the command line.
        sys.exit(0 if download_model(sys.argv[1]) else 1)

    # Interactive mode: print usage hints, then prompt for a name.
    print("\n💡 使用方法:")
    print("   python scripts/download_models.py <模型名称>")
    print("\n例如:")
    print("   python scripts/download_models.py buffalo_l")
    print("\n或直接运行进入交互模式:")

    # Empty input falls back to the recommended model.
    model_name = input("\n请输入要下载的模型名称 (直接回车下载 buffalo_l): ").strip() or "buffalo_l"

    success = download_model(model_name)

    if success:
        print("\n🎉 全部完成！")
        print("\n💡 提示:")
        print(f"   - 模型已保存到: {MODEL_ROOT}")
        print(f"   - 配置文件中设置: FACE_MODEL_ROOT = './models/insightface'")
        print(f"   - 现在启动服务将使用本地模型，不会再从GitHub下载")

    sys.exit(0 if success else 1)


# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()

