#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
从魔搭社区下载千问嵌入模型的脚本
简化版：一键下载，无需命令行参数
"""

import os
import subprocess
from config import (
    LOCAL_MODEL_CACHE_PATH,
    LOCAL_EM_NAME,
    LOCAL_EM_PATH,
    LOCAL_LLM_NAME,
    LOCAL_LLM_PATH,
)

# 强制使用魔搭社区下载
# print("\n正在安装modelscope库...")
# subprocess.run([sys.executable, "-m", "pip", "install", "modelscope"], check=True)
from modelscope.hub.snapshot_download import snapshot_download


def download_model(cache_path, model_name, model_path):
    """Download a model from ModelScope into *model_path*.

    The target directory doubles as a "done" marker: when it already
    exists, the download is skipped entirely and the function returns.

    Args:
        cache_path: Directory modelscope uses as its download cache.
        model_name: ModelScope model id (also used as the display name).
        model_path: Local directory the model files are written to.

    Returns:
        None. Errors are reported on stdout and deliberately not raised,
        so one failed download does not abort the following ones.
    """
    # BUGFIX: the original banner said "嵌入模型" (embedding model), but
    # this helper is also used for the LLM — use a neutral label.
    print(f"=== 开始下载模型: {model_name} ===")
    print(f"保存目录: {model_path}")
    print("使用下载源: 魔搭社区（modelscope）")

    # exist_ok=False on purpose: an existing directory means "already
    # downloaded", so we bail out instead of re-downloading.
    try:
        os.makedirs(model_path, exist_ok=False)
    except FileExistsError:
        print("目录已存在！")
        return

    try:
        print("\n开始从魔搭社区下载模型...")
        snapshot_download(
            cache_dir=cache_path,
            model_id=model_name,
            local_dir=model_path,
        )

        print(f"\n=== 模型{model_name}下载完成! 已保存到: {model_path} ===")
        print("\n提示: 如果需要验证模型是否能在GPU上运行，可以运行:")
        print("   python verify_gpu.py")

    except Exception as e:
        # Best-effort error report; swallowing the exception is intentional
        # so main() can still attempt the next model.
        print(f"\n模型{model_name}下载失败: {str(e)}")
        print("\n可能的解决方案:")
        print("1. 检查网络连接")
        print("2. 手动从 https://modelscope.cn/models 下载模型文件")
        print(f"3. 确保目标目录有写权限: {model_path}")


def main():
    """Print the active configuration, then fetch the embedding model
    and the large language model from ModelScope."""
    separator = "========================"

    # Echo the configuration first so the user can sanity-check the
    # paths and model ids before any download starts.
    print("=== 模型下载器配置 ===")
    print(f"- 使用下载源: 魔搭社区（modelscope）")
    print(f"- 缓存目录: {LOCAL_MODEL_CACHE_PATH}")
    print(f"- 嵌入模型名称: {LOCAL_EM_NAME}")
    print(f"- 嵌入模型保存目录: {LOCAL_EM_PATH}")
    print(f"- 魔搭社区嵌入模型ID: {LOCAL_EM_NAME}")
    print(separator)

    # Embedding model first, then the LLM — both share one cache dir.
    for model_id, target_dir in (
        (LOCAL_EM_NAME, LOCAL_EM_PATH),
        (LOCAL_LLM_NAME, LOCAL_LLM_PATH),
    ):
        download_model(
            cache_path=LOCAL_MODEL_CACHE_PATH,
            model_name=model_id,
            model_path=target_dir,
        )

    print(separator)


if __name__ == "__main__":
    main()
