#!/usr/bin/env python3
"""
Chunked upload service.

Supports uploading large files in chunks, resuming interrupted uploads
(by checking which chunks already arrived), and merging the chunks into
the final file.
"""

import hashlib
import logging
import os
import shutil
import tempfile
import uuid
from pathlib import Path
from typing import Dict, List, Optional

logger = logging.getLogger(__name__)

# Chunked-upload configuration.
CHUNK_SIZE = 5 * 1024 * 1024  # 5 MB per chunk
MAX_FILE_SIZE = 500 * 1024 * 1024  # 500 MB maximum accepted file size
TEMP_DIR = os.path.join(tempfile.gettempdir(), "redbook_uploads")

class ChunkUploadService:
    """Chunked upload service.

    Handles the full lifecycle of a chunked upload: session creation,
    per-chunk storage, resume checks, progress reporting, merging, and
    cleanup.  All session state is kept in memory, so sessions do not
    survive a process restart.
    """

    def __init__(self):
        # Ensure the shared temporary directory exists before any session
        # directory is created under it.
        os.makedirs(TEMP_DIR, exist_ok=True)

        # session_id -> session metadata (file info, received chunks, status).
        self.upload_sessions: Dict[str, Dict] = {}

    def calculate_file_hash(self, file_path: str) -> str:
        """Return the hex MD5 digest of the file at ``file_path``.

        MD5 is used here as a fast integrity checksum, not for security.
        """
        md5 = hashlib.md5()
        with open(file_path, 'rb') as f:
            # Stream in 4 KB blocks so large files are never fully in memory.
            for chunk in iter(lambda: f.read(4096), b''):
                md5.update(chunk)
        return md5.hexdigest()

    def create_upload_session(
        self,
        file_name: str,
        file_size: int,
        chunk_count: int,
        content_type: str = "application/octet-stream"
    ) -> str:
        """Create a new upload session and its working directory.

        Args:
            file_name: Original name of the file being uploaded.
            file_size: Total file size in bytes.
            chunk_count: Number of chunks the client will send.
            content_type: MIME type of the file.

        Returns:
            The generated session ID (a UUID4 string).
        """
        session_id = str(uuid.uuid4())

        session_info = {
            "file_name": file_name,
            "file_size": file_size,
            "chunk_count": chunk_count,
            "content_type": content_type,
            "received_chunks": {},  # chunk_number -> path of the stored chunk
            "upload_dir": os.path.join(TEMP_DIR, session_id),
            "status": "uploading",
        }

        # Each session gets its own directory so chunks cannot collide.
        os.makedirs(session_info["upload_dir"], exist_ok=True)

        self.upload_sessions[session_id] = session_info
        logger.info(f"创建上传会话: {session_id}, 文件: {file_name}")

        return session_id

    def upload_chunk(
        self,
        session_id: str,
        chunk_number: int,
        chunk_data: bytes
    ) -> bool:
        """Store one chunk for an existing session.

        Args:
            session_id: ID returned by :meth:`create_upload_session`.
            chunk_number: Zero-based chunk index.
            chunk_data: Raw bytes of the chunk.

        Returns:
            True if the chunk was written successfully; False for an
            unknown session, an out-of-range chunk number, or an I/O error.
        """
        if session_id not in self.upload_sessions:
            logger.error(f"会话不存在: {session_id}")
            return False

        session = self.upload_sessions[session_id]

        # Reject chunk numbers outside the announced range.  A stray chunk
        # (e.g. negative or >= chunk_count) would otherwise let the
        # count-based completeness check in merge_chunks pass while real
        # data is still missing, producing a corrupted merged file.
        if not 0 <= chunk_number < session["chunk_count"]:
            logger.error(f"无效的分片序号: {chunk_number}, 会话: {session_id}")
            return False

        # Persist the chunk under its zero-padded index.
        chunk_file = os.path.join(
            session["upload_dir"],
            f"chunk_{chunk_number:04d}"
        )

        try:
            with open(chunk_file, 'wb') as f:
                f.write(chunk_data)

            session["received_chunks"][chunk_number] = chunk_file
            logger.info(f"分片上传成功: session={session_id}, chunk={chunk_number}")
            return True

        except OSError as e:
            # Only file I/O happens in the try block, so OSError is the
            # complete set of expected failures.
            logger.error(f"分片上传失败: {e}")
            return False

    def check_chunk(self, session_id: str, chunk_number: int) -> bool:
        """Return True if *chunk_number* was already received (resume support).

        Also verifies the chunk file still exists on disk, so a temp file
        that was removed out-of-band is correctly reported as missing.
        """
        if session_id not in self.upload_sessions:
            return False

        session = self.upload_sessions[session_id]
        chunk_file = session["received_chunks"].get(chunk_number)
        return chunk_file is not None and os.path.exists(chunk_file)

    def get_upload_progress(self, session_id: str) -> Dict:
        """Report how far an upload has progressed.

        Returns:
            A dict with status, percent progress, chunk counts, and file
            metadata; ``{"status": "not_found"}`` for an unknown session.
        """
        if session_id not in self.upload_sessions:
            return {"status": "not_found"}

        session = self.upload_sessions[session_id]
        received_count = len(session["received_chunks"])
        total_count = session["chunk_count"]

        return {
            "status": session["status"],
            "progress": received_count / total_count * 100 if total_count > 0 else 0,
            "received_chunks": received_count,
            "total_chunks": total_count,
            "file_name": session["file_name"],
            "file_size": session["file_size"]
        }

    def merge_chunks(self, session_id: str) -> Optional[str]:
        """Concatenate all received chunks into the final file.

        Args:
            session_id: ID of a session whose chunks are all uploaded.

        Returns:
            Path of the merged file, or None if the session is unknown,
            chunks are missing, or the merge failed.
        """
        if session_id not in self.upload_sessions:
            logger.error(f"会话不存在: {session_id}")
            return None

        session = self.upload_sessions[session_id]

        # All chunks must be present.  upload_chunk guarantees the keys are
        # distinct ints in [0, chunk_count), so a matching count means the
        # set is exactly {0, ..., chunk_count - 1}.
        received_count = len(session["received_chunks"])
        if received_count != session["chunk_count"]:
            logger.error(f"分片不完整: {received_count}/{session['chunk_count']}")
            return None

        merged_file = os.path.join(
            session["upload_dir"],
            "merged_file"
        )

        try:
            with open(merged_file, 'wb') as outfile:
                # Concatenate chunks in ascending chunk-number order.
                for _chunk_number, chunk_path in sorted(session["received_chunks"].items()):
                    with open(chunk_path, 'rb') as infile:
                        # Stream copy so a chunk is never fully buffered.
                        shutil.copyfileobj(infile, outfile)

            # Restore the original file extension on the merged result.
            file_ext = os.path.splitext(session["file_name"])[1]
            final_file = merged_file + file_ext
            os.rename(merged_file, final_file)

            session["status"] = "merged"
            logger.info(f"分片合并成功: {session_id}")

            return final_file

        except OSError as e:
            logger.error(f"分片合并失败: {e}")
            session["status"] = "merge_failed"
            return None

    def cleanup_session(self, session_id: str) -> bool:
        """Remove a session's temporary directory and forget the session."""
        if session_id not in self.upload_sessions:
            return False

        session = self.upload_sessions[session_id]

        try:
            # Remove the per-session directory with all chunk files.
            if os.path.exists(session["upload_dir"]):
                shutil.rmtree(session["upload_dir"])

            # Drop the in-memory session record.
            del self.upload_sessions[session_id]

            logger.info(f"会话清理成功: {session_id}")
            return True

        except OSError as e:
            logger.error(f"会话清理失败: {e}")
            return False

    def validate_file_size(self, file_size: int) -> bool:
        """Return True if *file_size* is within the allowed maximum."""
        return file_size <= MAX_FILE_SIZE

    def calculate_chunk_count(self, file_size: int) -> int:
        """Return the number of CHUNK_SIZE chunks needed (ceiling division)."""
        return (file_size + CHUNK_SIZE - 1) // CHUNK_SIZE

# Module-level singleton shared by the application.
chunk_upload_service = ChunkUploadService()
