#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
下载 Chinese-CLIP 模型的脚本
"""
import os
import subprocess
import sys
from pathlib import Path


def install_git_lfs():
    """Ensure Git LFS is usable (required for downloading large model files).

    Returns:
        bool: True if Git LFS is available/initialized, False otherwise.
    """
    try:
        # Probe for an existing Git LFS installation.
        result = subprocess.run(['git', 'lfs', 'version'], capture_output=True, text=True)
        if result.returncode == 0:
            print("Git LFS 已安装")
            return True
    except FileNotFoundError:
        # `git` is not on PATH; fall through so the failure is reported below.
        pass

    print("正在安装 Git LFS...")
    try:
        # NOTE(review): `git lfs install` only initializes LFS hooks for the
        # current user — it does not install the git-lfs binary itself, which
        # must already be present on the system.
        subprocess.run(['git', 'lfs', 'install'], check=True)
        print("Git LFS 安装成功")
        return True
    except (subprocess.CalledProcessError, FileNotFoundError):
        # FileNotFoundError (git missing entirely) was previously uncaught
        # here and crashed the script instead of returning False.
        print("Git LFS 安装失败，请手动安装 Git LFS")
        return False


def download_model_with_git(repo_url, local_path):
    """Clone a model repository with Git (shallow, single-branch).

    Args:
        repo_url: URL of the Git repository hosting the model.
        local_path: Destination directory for the clone.

    Returns:
        bool: True on success, False if the clone failed.
    """
    print(f"正在从 {repo_url} 下载模型到 {local_path}")

    # Ensure the parent directory exists.  Guard against an empty dirname
    # (a bare name like "model" has no parent), since os.makedirs('')
    # raises FileNotFoundError.
    parent_dir = os.path.dirname(local_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)

    try:
        # --depth 1 + --single-branch keeps the download as small as possible.
        subprocess.run([
            'git', 'clone', '--recursive',
            '--single-branch', '--depth', '1',
            repo_url, local_path
        ], check=True)

        print(f"模型已成功下载到 {local_path}")
        return True
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        # FileNotFoundError covers a missing `git` binary, which would
        # otherwise crash the script (consistent with install_git_lfs).
        print(f"Git 克隆失败: {e}")
        return False


def download_chinese_clip_model():
    """Download the Chinese-CLIP model into the local ``models`` directory.

    Returns:
        str | None: path of the local model directory, or None on failure.
    """
    print("开始下载 Chinese-CLIP 模型...")

    repo_url = "https://huggingface.co/OFA-Sys/chinese-clip-vit-base-patch16"
    target_dir = Path("models") / "chinese-clip-vit-base-patch16"

    # Make sure the models directory exists before cloning into it.
    target_dir.parent.mkdir(exist_ok=True)

    # Reuse a previous download when one is already on disk.
    if target_dir.exists():
        print(f"模型已存在于 {target_dir}，跳过下载")
        return str(target_dir)

    # Git LFS is mandatory for pulling the large weight files.
    if not install_git_lfs():
        print("无法安装 Git LFS，请手动安装后再运行此脚本")
        return None

    if not download_model_with_git(repo_url, str(target_dir)):
        print("模型下载失败")
        return None

    print(f"Chinese-CLIP 模型下载完成: {target_dir}")
    return str(target_dir)


def download_openai_clip_model():
    """Download the fallback OpenAI CLIP model into ``models``.

    Returns:
        str | None: path of the local model directory, or None on failure.
    """
    print("开始下载备用 OpenAI CLIP 模型...")

    repo_url = "https://huggingface.co/openai/clip-vit-base-patch32"
    target_dir = Path("models") / "clip-vit-base-patch32"

    # Make sure the models directory exists before cloning into it.
    target_dir.parent.mkdir(exist_ok=True)

    # Reuse a previous download when one is already on disk.
    if target_dir.exists():
        print(f"备用模型已存在于 {target_dir}，跳过下载")
        return str(target_dir)

    if not download_model_with_git(repo_url, str(target_dir)):
        print("备用模型下载失败")
        return None

    print(f"备用 OpenAI CLIP 模型下载完成: {target_dir}")
    return str(target_dir)


def update_config(local_model_path, config_path="configs/config.json"):
    """Point the application config at a locally downloaded CLIP model.

    Args:
        local_model_path: Path to the downloaded model directory.
        config_path: JSON config file to update.  Defaults to
            ``configs/config.json`` (backward compatible with prior callers).

    Returns:
        bool: True if the config was updated, False if it does not exist.
    """
    import json

    config_path = Path(config_path)
    if not config_path.exists():
        print(f"配置文件 {config_path} 不存在")
        return False

    # Load the existing configuration so unrelated settings are preserved.
    with open(config_path, 'r', encoding='utf-8') as f:
        config = json.load(f)

    # Create the CLIP section if absent, then overwrite its model settings.
    clip_cfg = config.setdefault("clip", {})
    clip_cfg["model_name"] = str(local_model_path)
    clip_cfg["local_model_path"] = str(local_model_path)
    clip_cfg["offline_mode"] = True  # avoid hitting the HuggingFace hub at runtime
    clip_cfg["fallback_model"] = "models/clip-vit-base-patch32"
    clip_cfg["device"] = "auto"

    # ensure_ascii=False keeps any non-ASCII text in the config readable.
    with open(config_path, 'w', encoding='utf-8') as f:
        json.dump(config, f, indent=2, ensure_ascii=False)

    print(f"配置文件已更新: {config_path}")
    return True


def main():
    """Entry point: fetch Chinese-CLIP (or a fallback) and update the config."""
    print("Chinese-CLIP 模型下载器")
    print("=" * 40)

    # Prefer the Chinese-CLIP model; fall back to OpenAI CLIP on failure.
    model_path = download_chinese_clip_model()
    if model_path:
        update_config(model_path)
        print("\nChinese-CLIP 模型下载和配置完成！")
    else:
        print("\nChinese-CLIP 模型下载失败，尝试下载备用模型...")
        fallback_path = download_openai_clip_model()
        if fallback_path:
            update_config(fallback_path)
            print("\n备用 OpenAI CLIP 模型下载和配置完成！")
        else:
            print("\n所有模型下载均失败，请检查网络连接或手动下载模型。")

    # Usage hints for the operator.
    for tip in (
        "\n提示：",
        "- 模型文件较大，下载可能需要一些时间",
        "- 下载完成后，重新启动服务以应用新配置",
        "- 如需手动配置，请参考 docs/clip_model_setup.md",
    ):
        print(tip)


# Run the downloader only when executed as a script, not when imported.
if __name__ == "__main__":
    main()