"""
Async File Processing Service
异步文件处理服务 - 异步文件上传/下载，支持进度跟踪和错误处理
"""

import asyncio
import hashlib
import uuid
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Optional, Dict, Any, List, Callable, BinaryIO

import aiofiles
import aiohttp
import structlog

from ..core.config import settings
from ..core.exceptions import (
    FileUploadError,
    FileDownloadError,
    FileValidationError,
    FileSizeExceededError,
    FileNotFoundError,
    ValidationError,
)
from .file_storage_service import FileStorageService

# Configure a module-level structured logger
logger = structlog.get_logger(__name__)


class AsyncFileProcessor:
    """异步文件处理器"""

    def __init__(self):
        self.upload_sessions: Dict[str, Dict[str, Any]] = {}
        self.download_sessions: Dict[str, Dict[str, Any]] = {}
        self.progress_callbacks: Dict[str, List[Callable]] = {}
        self.completion_callbacks: Dict[str, List[Callable]] = {}
        self.chunk_size = 1024 * 1024  # 1MB chunks
        self.max_concurrent_uploads = 5
        self.max_concurrent_downloads = 5

        # 信号量控制并发数
        self.upload_semaphore = asyncio.Semaphore(self.max_concurrent_uploads)
        self.download_semaphore = asyncio.Semaphore(self.max_concurrent_downloads)

    async def upload_file_async(
        self,
        file_path: str,
        destination_path: str,
        user_id: str,
        progress_callback: Optional[Callable] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Upload a single file asynchronously with progress reporting.

        Validates the source file, enforces the configured size limit,
        deduplicates by content hash, then streams the file to
        *destination_path* under the upload concurrency semaphore.

        Args:
            file_path: Local source file path.
            destination_path: Target storage path.
            user_id: Id of the user owning the upload.
            progress_callback: Optional callback ``(progress, message)``;
                may be sync or async.
            metadata: Optional file metadata forwarded to the upload step.

        Returns:
            Dict[str, Any]: Upload result (session id, stored path, size,
            hash, duplicate flag, timing).

        Raises:
            FileUploadError: On any failure; the original exception is
                preserved as ``__cause__``.
        """
        session_id = str(uuid.uuid4())

        try:
            logger.info(
                "Starting async file upload",
                session_id=session_id,
                file_path=file_path,
                destination_path=destination_path,
                user_id=user_id,
            )

            # Validate the source file.
            file_path_obj = Path(file_path)
            if not file_path_obj.exists():
                raise FileNotFoundError(f"文件不存在: {file_path}")

            if not file_path_obj.is_file():
                raise FileValidationError("路径不是文件")

            file_size = file_path_obj.stat().st_size

            # Enforce the configured maximum file size.
            max_size = settings.storage.max_file_size_mb * 1024 * 1024
            if file_size > max_size:
                raise FileSizeExceededError(
                    f"文件大小超过限制: {file_size} > {max_size}"
                )

            # Register the caller's progress callback for this session.
            if progress_callback:
                self.add_progress_callback(session_id, progress_callback)

            # Hash the file content for deduplication.
            await self._notify_progress(session_id, 0, "计算文件哈希...")
            file_hash = await self._calculate_file_hash_async(file_path)

            # Short-circuit if an identical file is already stored.
            await self._notify_progress(session_id, 10, "检查文件重复...")
            existing_file = await self._check_duplicate_file(file_hash, user_id)
            if existing_file:
                logger.info(
                    "File already exists, returning existing file info",
                    session_id=session_id,
                    file_hash=file_hash,
                )
                return {
                    "session_id": session_id,
                    "file_id": existing_file["file_id"],
                    "file_path": existing_file["file_path"],
                    "file_size": existing_file["file_size"],
                    "file_hash": file_hash,
                    "is_duplicate": True,
                    "upload_time": 0,
                }

            # Bound concurrent uploads with the semaphore.
            async with self.upload_semaphore:
                upload_result = await self._perform_async_upload(
                    session_id,
                    file_path,
                    destination_path,
                    file_size,
                    file_hash,
                    metadata,
                )

            await self._notify_progress(session_id, 100, "上传完成")

            # Fire completion callbacks with the final result.
            await self._notify_completion(session_id, upload_result)

            logger.info(
                "Async file upload completed successfully",
                session_id=session_id,
                file_path=file_path,
                destination_path=destination_path,
                file_size=file_size,
                upload_time=upload_result.get("upload_time", 0),
            )

            return upload_result

        except Exception as e:
            logger.error(
                "Async file upload failed",
                session_id=session_id,
                file_path=file_path,
                error=str(e),
                error_type=type(e).__name__,
            )

            await self._notify_error(session_id, e)
            # Chain the original exception so callers can inspect __cause__.
            raise FileUploadError(f"文件上传失败: {str(e)}") from e

        finally:
            self._cleanup_session(session_id)

    async def download_file_async(
        self,
        file_path: str,
        local_destination: str,
        progress_callback: Optional[Callable] = None,
        chunk_size: Optional[int] = None,
    ) -> Dict[str, Any]:
        """Download a file asynchronously with progress reporting.

        Args:
            file_path: Remote/source file path.
            local_destination: Local target path.
            progress_callback: Optional callback ``(progress, message)``;
                may be sync or async.
            chunk_size: Transfer chunk size (defaults to ``self.chunk_size``).

        Returns:
            Dict[str, Any]: Download result info (path, size, timing).

        Raises:
            FileDownloadError: On any failure; the original exception is
                preserved as ``__cause__``.
        """
        session_id = str(uuid.uuid4())

        try:
            logger.info(
                "Starting async file download",
                session_id=session_id,
                file_path=file_path,
                local_destination=local_destination,
            )

            # Register the caller's progress callback for this session.
            if progress_callback:
                self.add_progress_callback(session_id, progress_callback)

            # Bound concurrent downloads with the semaphore.
            async with self.download_semaphore:
                download_result = await self._perform_async_download(
                    session_id,
                    file_path,
                    local_destination,
                    chunk_size or self.chunk_size,
                )

            await self._notify_progress(session_id, 100, "下载完成")

            # Fire completion callbacks with the final result.
            await self._notify_completion(session_id, download_result)

            logger.info(
                "Async file download completed successfully",
                session_id=session_id,
                file_path=file_path,
                local_destination=local_destination,
                file_size=download_result.get("file_size", 0),
                download_time=download_result.get("download_time", 0),
            )

            return download_result

        except Exception as e:
            logger.error(
                "Async file download failed",
                session_id=session_id,
                file_path=file_path,
                error=str(e),
                error_type=type(e).__name__,
            )

            await self._notify_error(session_id, e)
            # Chain the original exception so callers can inspect __cause__.
            raise FileDownloadError(f"文件下载失败: {str(e)}") from e

        finally:
            self._cleanup_session(session_id)

    async def upload_file_chunked_async(
        self,
        file_path: str,
        destination_path: str,
        user_id: str,
        chunk_size: Optional[int] = None,
        progress_callback: Optional[Callable] = None,
    ) -> Dict[str, Any]:
        """Upload a file in concurrently-transferred chunks.

        Splits the source into ``chunk_size`` pieces, uploads all pieces
        concurrently under the upload semaphore, then merges them at
        *destination_path*. Completed chunks are recorded in the session so
        ``get_upload_status`` can report accurate progress.

        Args:
            file_path: Local source file path.
            destination_path: Target storage path.
            user_id: Id of the user owning the upload.
            chunk_size: Chunk size in bytes (defaults to ``self.chunk_size``).
            progress_callback: Optional callback ``(progress, message)``.

        Returns:
            Dict[str, Any]: Result info including chunk counts and timing.

        Raises:
            FileUploadError: On any failure; original exception chained.
        """
        session_id = str(uuid.uuid4())
        chunk_size = chunk_size or self.chunk_size

        try:
            logger.info(
                "Starting async chunked file upload",
                session_id=session_id,
                file_path=file_path,
                destination_path=destination_path,
                chunk_size=chunk_size,
            )

            # Validate the source file.
            file_path_obj = Path(file_path)
            if not file_path_obj.exists():
                raise FileNotFoundError(f"文件不存在: {file_path}")

            file_size = file_path_obj.stat().st_size
            # Ceiling division: number of chunks needed to cover the file.
            total_chunks = (file_size + chunk_size - 1) // chunk_size

            # Register the caller's progress callback for this session.
            if progress_callback:
                self.add_progress_callback(session_id, progress_callback)

            # Register the upload session so its status can be queried.
            upload_session = {
                "session_id": session_id,
                "file_path": file_path,
                "destination_path": destination_path,
                "user_id": user_id,
                "file_size": file_size,
                "chunk_size": chunk_size,
                "total_chunks": total_chunks,
                "uploaded_chunks": set(),
                # timezone-aware now(); utcnow() is deprecated and naive.
                "start_time": datetime.now(timezone.utc),
            }

            self.upload_sessions[session_id] = upload_session

            # Bound concurrent uploads with the semaphore.
            async with self.upload_semaphore:
                # Launch every chunk upload concurrently.
                upload_tasks = [
                    self._upload_chunk_async(
                        session_id,
                        chunk_index,
                        file_path,
                        destination_path,
                        chunk_size,
                        file_size,
                    )
                    for chunk_index in range(total_chunks)
                ]

                chunk_results = await asyncio.gather(
                    *upload_tasks, return_exceptions=True
                )

                # Fail the whole upload if any chunk failed.
                failed_chunks = [
                    i
                    for i, result in enumerate(chunk_results)
                    if isinstance(result, Exception)
                ]
                if failed_chunks:
                    raise FileUploadError(f"部分块上传失败: {failed_chunks}")

                # All chunks succeeded: record them so get_upload_status()
                # reflects full chunk completion (set update is idempotent).
                upload_session["uploaded_chunks"].update(
                    result["chunk_index"] for result in chunk_results
                )

                # Merge chunks into the final file (if needed).
                await self._notify_progress(session_id, 90, "合并文件块...")
                final_path = await self._merge_chunks_async(
                    session_id, destination_path
                )

            upload_time = (
                datetime.now(timezone.utc) - upload_session["start_time"]
            ).total_seconds()

            await self._notify_progress(session_id, 100, "分块上传完成")

            result = {
                "session_id": session_id,
                "file_path": final_path,
                "file_size": file_size,
                "total_chunks": total_chunks,
                "chunk_size": chunk_size,
                "upload_time": upload_time,
                "is_chunked": True,
            }

            # Fire completion callbacks with the final result.
            await self._notify_completion(session_id, result)

            logger.info(
                "Async chunked file upload completed successfully",
                session_id=session_id,
                file_path=file_path,
                total_chunks=total_chunks,
                upload_time=upload_time,
            )

            return result

        except Exception as e:
            logger.error(
                "Async chunked file upload failed",
                session_id=session_id,
                file_path=file_path,
                error=str(e),
                error_type=type(e).__name__,
            )

            await self._notify_error(session_id, e)
            # Chain the original exception so callers can inspect __cause__.
            raise FileUploadError(f"分块上传失败: {str(e)}") from e

        finally:
            self._cleanup_session(session_id)

    async def process_multiple_files_async(
        self,
        file_paths: List[str],
        destination_dir: str,
        user_id: str,
        progress_callback: Optional[Callable] = None,
    ) -> List[Dict[str, Any]]:
        """Process a batch of files asynchronously.

        Each file is uploaded into *destination_dir* via
        :meth:`upload_file_async`; uploads run in sub-batches of three, and
        overall batch progress is funneled through *progress_callback*. A
        failed file is captured as an error entry instead of aborting the
        whole batch.

        Args:
            file_paths: Local paths of the files to upload.
            destination_dir: Directory-like prefix for the stored files.
            user_id: Id of the user owning the uploads.
            progress_callback: Optional callback ``(progress, message)``.

        Returns:
            List[Dict[str, Any]]: Per-file results in input order; a failed
            file yields ``{"file_path", "success": False, "error"}``.
        """
        total_files = len(file_paths)

        logger.info(
            "Starting batch file processing",
            total_files=total_files,
            destination_dir=destination_dir,
            user_id=user_id,
        )

        def make_batch_progress(index):
            # Scale one file's 0-100 progress into the batch-wide range,
            # binding `index` per file via the factory closure.
            async def on_progress(progress, message):
                overall_progress = (index * 100 + progress) / total_files
                if progress_callback:
                    await progress_callback(
                        overall_progress,
                        f"处理文件 {index + 1}/{total_files}: {message}",
                    )

            return on_progress

        # Build one (lazy) upload coroutine per file.
        pending = [
            self.upload_file_async(
                file_path=path,
                destination_path=f"{destination_dir}/{Path(path).name}",
                user_id=user_id,
                progress_callback=make_batch_progress(idx),
                metadata={"batch_index": idx, "total_files": total_files},
            )
            for idx, path in enumerate(file_paths)
        ]

        # Cap concurrency by awaiting the coroutines in small sub-batches.
        batch_size = 3
        results: List[Dict[str, Any]] = []

        for offset in range(0, len(pending), batch_size):
            outcomes = await asyncio.gather(
                *pending[offset : offset + batch_size], return_exceptions=True
            )

            for shift, outcome in enumerate(outcomes):
                file_index = offset + shift
                if isinstance(outcome, Exception):
                    logger.error(
                        f"File {file_index + 1} processing failed",
                        file_path=file_paths[file_index],
                        error=str(outcome),
                    )
                    results.append(
                        {
                            "file_path": file_paths[file_index],
                            "success": False,
                            "error": str(outcome),
                        }
                    )
                else:
                    results.append(outcome)

        logger.info(
            "Batch file processing completed",
            total_files=total_files,
            successful_count=len([r for r in results if r.get("success", True)]),
            failed_count=len([r for r in results if not r.get("success", True)]),
        )

        return results

    # 私有辅助方法

    async def _calculate_file_hash_async(self, file_path: str) -> str:
        """Compute the MD5 hex digest of a file without loading it whole.

        Streams the file in small chunks and periodically yields control to
        the event loop so hashing a large file does not starve other tasks.

        Args:
            file_path: Path of the file to hash.

        Returns:
            str: Lowercase hex MD5 digest.

        Raises:
            FileValidationError: If reading/hashing fails; the original
                exception is preserved as ``__cause__``.
        """
        # MD5 is used for integrity/dedup checks only, not for security.
        hash_md5 = hashlib.md5(usedforsecurity=False)
        chunk_size = min(self.chunk_size, 8192)  # small chunks keep memory flat

        try:
            async with aiofiles.open(file_path, "rb") as f:
                bytes_processed = 0
                while chunk := await f.read(chunk_size):
                    hash_md5.update(chunk)
                    bytes_processed += len(chunk)

                    # Yield to the event loop roughly once per MB so large
                    # files do not block other coroutines.
                    if bytes_processed % (1024 * 1024) == 0:
                        await asyncio.sleep(0)

        except Exception as e:
            logger.error("文件哈希计算失败", file_path=file_path, error=str(e))
            # Chain the original exception so callers can inspect __cause__.
            raise FileValidationError(f"文件哈希计算失败: {str(e)}") from e

        return hash_md5.hexdigest()

    async def _check_duplicate_file(
        self, file_hash: str, user_id: str
    ) -> Optional[Dict[str, Any]]:
        """检查重复文件"""
        # 这里应该查询数据库检查是否已存在相同哈希的文件
        # 为了演示，返回None表示没有重复
        return None

    async def _perform_async_upload(
        self,
        session_id: str,
        file_path: str,
        destination_path: str,
        file_size: int,
        file_hash: str,
        metadata: Optional[Dict[str, Any]],
    ) -> Dict[str, Any]:
        """Stream-copy *file_path* to *destination_path* asynchronously.

        Copies in small chunks to keep memory flat, periodically reporting
        progress and yielding to the event loop. *metadata* is currently
        unused by the copy itself (accepted for interface compatibility).

        Returns:
            Dict[str, Any]: session id, stored path, size, hash, duration,
            and ``is_duplicate=False``.

        Raises:
            FileUploadError: If the copy fails; the original exception is
                preserved as ``__cause__``.
        """
        import gc  # hoisted out of the copy loop; only needed for periodic GC

        # timezone-aware now(); utcnow() is deprecated and naive.
        start_time = datetime.now(timezone.utc)
        bytes_uploaded = 0
        chunk_count = 0

        try:
            # Small chunk size keeps the streaming copy's memory usage low.
            upload_chunk_size = min(self.chunk_size, 8192)

            async with aiofiles.open(file_path, "rb") as source_file:
                async with aiofiles.open(destination_path, "wb") as dest_file:
                    while chunk := await source_file.read(upload_chunk_size):
                        await dest_file.write(chunk)
                        bytes_uploaded += len(chunk)
                        chunk_count += 1

                        # Report progress and yield every 100 chunks so the
                        # event loop is never blocked for long.
                        if chunk_count % 100 == 0:
                            progress = int((bytes_uploaded / file_size) * 90)
                            await self._notify_progress(
                                session_id,
                                progress,
                                f"上传中... {bytes_uploaded}/{file_size} bytes",
                            )
                            await asyncio.sleep(0)

                        # For very large transfers (>100MB copied), nudge a
                        # cheap gen-0 collection every 1000 chunks.
                        if (
                            chunk_count % 1000 == 0
                            and bytes_uploaded > 100 * 1024 * 1024
                        ):
                            gc.collect(0)

            upload_time = (datetime.now(timezone.utc) - start_time).total_seconds()

            logger.debug(
                "文件上传完成",
                session_id=session_id,
                file_path=file_path,
                file_size=file_size,
                upload_time=upload_time,
                chunks_processed=chunk_count,
            )

            return {
                "session_id": session_id,
                "file_path": destination_path,
                "file_size": file_size,
                "file_hash": file_hash,
                "upload_time": upload_time,
                "is_duplicate": False,
            }

        except Exception as e:
            logger.error(
                "文件上传失败",
                session_id=session_id,
                file_path=file_path,
                error=str(e),
                bytes_uploaded=bytes_uploaded,
                chunks_processed=chunk_count,
            )
            # Chain the original exception so callers can inspect __cause__.
            raise FileUploadError(f"文件上传失败: {str(e)}") from e

    async def _perform_async_download(
        self, session_id: str, file_path: str, local_destination: str, chunk_size: int
    ) -> Dict[str, Any]:
        """Stream-copy *file_path* to *local_destination* asynchronously.

        Progress is computed against the source file's total size (the
        previous version divided by the bytes copied so far, pinning the
        reported progress at 90%). The transfer is registered in
        ``download_sessions`` so ``get_download_status`` can observe it;
        the caller's ``finally`` cleanup removes the entry.

        Returns:
            Dict[str, Any]: session id, local path, bytes copied, duration.
        """
        # timezone-aware now(); utcnow() is deprecated and naive.
        start_time = datetime.now(timezone.utc)

        # Stat the source up front so progress has a correct denominator.
        total_size = Path(file_path).stat().st_size

        file_size = 0
        bytes_downloaded = 0

        # Track the transfer so get_download_status() has data to report.
        session = {
            "file_path": file_path,
            "local_destination": local_destination,
            "file_size": total_size,
            "bytes_downloaded": 0,
        }
        self.download_sessions[session_id] = session

        # Simulated download: stream-copy the source to the destination.
        async with aiofiles.open(file_path, "rb") as source_file:
            async with aiofiles.open(local_destination, "wb") as dest_file:
                while chunk := await source_file.read(chunk_size):
                    await dest_file.write(chunk)
                    bytes_downloaded += len(chunk)
                    file_size += len(chunk)
                    session["bytes_downloaded"] = bytes_downloaded

                    # Progress against the real total, capped at the 90%
                    # transfer-phase budget (guard against a zero-size file).
                    progress = int((bytes_downloaded / (total_size or 1)) * 90)
                    await self._notify_progress(
                        session_id, progress, f"下载中... {bytes_downloaded} bytes"
                    )

        download_time = (datetime.now(timezone.utc) - start_time).total_seconds()

        return {
            "session_id": session_id,
            "file_path": local_destination,
            "file_size": file_size,
            "download_time": download_time,
        }

    async def _upload_chunk_async(
        self,
        session_id: str,
        chunk_index: int,
        file_path: str,
        destination_path: str,
        chunk_size: int,
        total_size: int,
    ) -> Dict[str, Any]:
        """Upload one chunk of a chunked upload (transfer is simulated).

        Reads the chunk's byte range from the source file, simulates the
        network transfer, records the chunk in the session's
        ``uploaded_chunks`` set (so status queries see real progress), and
        reports overall progress.

        Returns:
            Dict[str, Any]: chunk index, actual chunk size, success flag.

        Raises:
            FileUploadError: If the chunk fails; original exception chained.
        """
        try:
            start_offset = chunk_index * chunk_size
            # Clamp the final chunk to the end of the file.
            end_offset = min(start_offset + chunk_size, total_size)
            chunk_size_actual = end_offset - start_offset

            async with aiofiles.open(file_path, "rb") as f:
                await f.seek(start_offset)
                chunk_data = await f.read(chunk_size_actual)

            # Simulated network transfer latency.
            await asyncio.sleep(0.1)

            # Record completion so get_upload_status() reflects progress.
            session = self.upload_sessions.get(session_id)
            if session is not None:
                session.setdefault("uploaded_chunks", set()).add(chunk_index)

            # Use the clamped end offset so the final chunk cannot push
            # progress past the 80% chunk-phase budget.
            overall_progress = int(end_offset / total_size * 80)
            await self._notify_progress(
                session_id, overall_progress, f"上传块 {chunk_index + 1}..."
            )

            return {
                "chunk_index": chunk_index,
                "chunk_size": chunk_size_actual,
                "success": True,
            }

        except Exception as e:
            logger.error(
                "Chunk upload failed",
                session_id=session_id,
                chunk_index=chunk_index,
                error=str(e),
            )
            # Chain the original exception so callers can inspect __cause__.
            raise FileUploadError(f"块 {chunk_index} 上传失败: {str(e)}") from e

    async def _merge_chunks_async(self, session_id: str, destination_path: str) -> str:
        """异步合并文件块"""
        # 这里应该实现实际的块合并逻辑
        # 为了演示，直接返回目标路径
        return destination_path

    def add_progress_callback(self, session_id: str, callback: Callable) -> None:
        """Register *callback* to receive progress updates for *session_id*.

        Callbacks may be sync or async; they are invoked in registration
        order with ``(progress, message)``.
        """
        self.progress_callbacks.setdefault(session_id, []).append(callback)

    def add_completion_callback(self, session_id: str, callback: Callable) -> None:
        """Register *callback* to be invoked when *session_id* finishes.

        Callbacks may be sync or async; they are invoked in registration
        order with the final result dict.
        """
        self.completion_callbacks.setdefault(session_id, []).append(callback)

    async def _notify_progress(
        self, session_id: str, progress: int, message: str
    ) -> None:
        """通知进度更新"""
        callbacks = self.progress_callbacks.get(session_id, [])
        for callback in callbacks:
            try:
                if asyncio.iscoroutinefunction(callback):
                    await callback(progress, message)
                else:
                    callback(progress, message)
            except Exception as e:
                logger.error(
                    "Progress callback failed", session_id=session_id, error=str(e)
                )

    async def _notify_completion(self, session_id: str, result: Dict[str, Any]) -> None:
        """通知完成"""
        callbacks = self.completion_callbacks.get(session_id, [])
        for callback in callbacks:
            try:
                if asyncio.iscoroutinefunction(callback):
                    await callback(result)
                else:
                    callback(result)
            except Exception as e:
                logger.error(
                    "Completion callback failed", session_id=session_id, error=str(e)
                )

    async def _notify_error(self, session_id: str, error: Exception) -> None:
        """通知错误"""
        callbacks = self.completion_callbacks.get(session_id, [])
        for callback in callbacks:
            try:
                if asyncio.iscoroutinefunction(callback):
                    await callback(None, error)
                else:
                    callback(None, error)
            except Exception as e:
                logger.error(
                    "Error callback failed", session_id=session_id, error=str(e)
                )

    def _cleanup_session(self, session_id: str) -> None:
        """Drop all state associated with *session_id* (no-op if absent)."""
        for registry in (
            self.upload_sessions,
            self.download_sessions,
            self.progress_callbacks,
            self.completion_callbacks,
        ):
            registry.pop(session_id, None)

    async def get_upload_status(self, session_id: str) -> Optional[Dict[str, Any]]:
        """获取上传状态"""
        session = self.upload_sessions.get(session_id)
        if not session:
            return None

        return {
            "session_id": session_id,
            "file_path": session["file_path"],
            "file_size": session["file_size"],
            "total_chunks": session.get("total_chunks", 0),
            "uploaded_chunks": len(session.get("uploaded_chunks", set())),
            "progress": int(
                len(session.get("uploaded_chunks", set()))
                / session.get("total_chunks", 1)
                * 100
            ),
        }

    async def get_download_status(self, session_id: str) -> Optional[Dict[str, Any]]:
        """获取下载状态"""
        session = self.download_sessions.get(session_id)
        if not session:
            return None

        return {
            "session_id": session_id,
            "file_path": session["file_path"],
            "local_destination": session["local_destination"],
            "file_size": session.get("file_size", 0),
            "bytes_downloaded": session.get("bytes_downloaded", 0),
            "progress": int(
                session.get("bytes_downloaded", 0)
                / max(session.get("file_size", 1), 1)
                * 100
            ),
        }


# Module-level singleton instance of the async file processor
async_file_processor = AsyncFileProcessor()


# Public API of this module
__all__ = ["AsyncFileProcessor", "async_file_processor"]
