import os
import sys
from pathlib import Path
import shutil
import hashlib

# 添加项目根目录到 Python 路径
ROOT_DIR = Path(__file__).parent.parent
sys.path.append(str(ROOT_DIR))

import torch
import time
from transformers import AutoModelForCausalLM, AutoTokenizer
from app.config import settings
from app.utils.logger import setup_logger
import requests
from tqdm import tqdm

logger = setup_logger("model_downloader")

def download_with_retry(func, *args, max_retries=3, **kwargs):
    """Call *func* with the given arguments, retrying on any exception.

    Uses exponential backoff (1s, 2s, 4s, ...) between attempts.

    Args:
        func: callable to invoke.
        *args, **kwargs: forwarded to ``func``.
        max_retries: total number of attempts before giving up.

    Returns:
        Whatever ``func`` returns on the first successful attempt.

    Raises:
        Whatever exception ``func`` raised on the final attempt,
        with its original traceback preserved.
    """
    for attempt in range(max_retries):
        try:
            return func(*args, **kwargs)
        except Exception:
            # On the last attempt, re-raise the original exception
            # (bare `raise` keeps the full traceback, unlike `raise e`).
            if attempt == max_retries - 1:
                raise
            logger.warning(f"下载失败，尝试重试 ({attempt + 1}/{max_retries})")
            time.sleep(2 ** attempt)  # exponential backoff

def verify_file(file_path: Path, expected_sha256: str = None) -> bool:
    """Check that *file_path* exists and, optionally, matches a SHA-256 digest.

    Args:
        file_path: path of the file to inspect.
        expected_sha256: hex digest to compare against; if falsy,
            mere existence of the file counts as valid.

    Returns:
        True when the file exists and (if a digest was supplied) its
        SHA-256 matches; False otherwise.
    """
    if not file_path.exists():
        return False
    if not expected_sha256:
        # No checksum requested — existence alone is enough.
        return True

    digest = hashlib.sha256()
    with open(file_path, "rb") as fh:
        # Hash in fixed-size chunks so large files don't load into memory.
        while True:
            chunk = fh.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

def manual_download_model():
    """Fetch the TinyLlama model files into the local cache directory.

    Small JSON/tokenizer files are downloaded directly over HTTP; the
    large safetensors weights file must be downloaded by the user, who
    is prompted interactively and the result is SHA-256 verified.

    Returns:
        True once every file is present (and verified where applicable).

    Raises:
        RuntimeError: when a direct download fails or the manually
            placed weights file is missing / fails checksum verification.
    """
    cache_dir = Path(settings.MODEL_CACHE_DIR)
    model_dir = cache_dir / settings.MODEL_NAME
    model_dir.mkdir(parents=True, exist_ok=True)

    # File name -> download URL (+ expected SHA-256 for the large weights).
    model_files = {
        "model.safetensors": {
            "url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0/resolve/main/model.safetensors",
            "sha256": "6e6001da2106d4757498752a021df6c2bdc332c650aae4bae6b0c004dcf14933"
        },
        "config.json": {
            "url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0/raw/main/config.json"
        },
        "tokenizer.json": {
            "url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0/raw/main/tokenizer.json"
        },
        "tokenizer_config.json": {
            "url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0/raw/main/tokenizer_config.json"
        }
    }

    for filename, info in model_files.items():
        file_path = model_dir / filename
        if not file_path.exists() or settings.FORCE_DOWNLOAD:
            # BUG FIX: messages previously contained the literal text
            # "(unknown)" instead of interpolating the file name.
            logger.info(f"正在下载 {filename}...")

            if filename == "model.safetensors":
                # Weights are too large for a plain requests.get here;
                # walk the user through a manual browser download.
                logger.info("请手动下载模型文件:")
                logger.info(f"1. 访问: {info['url']}")
                logger.info(f"2. 将下载的文件保存到: {file_path}")
                logger.info(f"3. SHA256校验值: {info['sha256']}")
                input("下载完成后按回车继续...")

                if not verify_file(file_path, info['sha256']):
                    raise RuntimeError(f"文件 {filename} 校验失败或不存在")
            else:
                # BUG FIX: added a timeout so a dead connection cannot
                # hang the script indefinitely.
                response = requests.get(info['url'], timeout=30)
                if response.status_code == 200:
                    with open(file_path, 'wb') as f:
                        f.write(response.content)
                else:
                    raise RuntimeError(f"下载 {filename} 失败")

    logger.info("所有文件下载完成！")
    return True

def check_network():
    """Probe connectivity to Hugging Face.

    Returns:
        True when https://huggingface.co answers with HTTP 200,
        False on any other status code or network error.
    """
    try:
        # Direct connection, no proxy configuration; 10s cap on the probe.
        response = requests.get(
            "https://huggingface.co",
            timeout=10
        )
    except Exception as e:
        logger.error(f"网络检查失败: {str(e)}")
        return False

    if response.status_code != 200:
        logger.error(f"连接 Hugging Face 失败，状态码: {response.status_code}")
        return False

    logger.info("成功连接到 Hugging Face")
    return True

def main():
    """Entry point: dispatch to manual or automatic download mode.

    BUG FIX: the original `try:` block had no `except`/`finally`
    clause, which is a SyntaxError — the module could not even be
    imported. An except clause that logs and exits non-zero is added.
    """
    try:
        if settings.MANUAL_DOWNLOAD:
            manual_download_model()
        else:
            # Automatic download path: verify connectivity first.
            if not check_network():
                logger.error("无法连接到 Hugging Face，请检查网络连接")
                logger.info("建议尝试手动下载模式：设置 MANUAL_DOWNLOAD=true")
                # BUG FIX: use sys.exit — the bare `exit()` builtin is
                # injected by the `site` module and may be unavailable.
                sys.exit(1)
            # ... 其余代码保持不变
    except Exception as e:
        logger.error(f"下载过程出错: {str(e)}")
        sys.exit(1)

if __name__ == "__main__":
    main()