"""Download engine - a powerful download core based on yt-dlp.

This module provides the `DownloadEngine` class, which encapsulates the
functionality of `yt-dlp` to manage and execute download tasks. It handles
progress reporting, error recovery, and various `yt-dlp` configurations.
"""

import asyncio
import inspect
import os
import tempfile
from collections.abc import Callable
from pathlib import Path
from typing import Any
from uuid import UUID

import yt_dlp
from loguru import logger
from yt_dlp.utils import DownloadError

from ..models.task import DownloadTask, FileType, TaskStatus
from ..utils.crypto import decrypt_cookies

ProgressCallback = Callable[[UUID, TaskStatus, float | None, float | None, str | None, str | None, str | None], None]


class DownloadEngine:
    """Encapsulates yt-dlp for managing download tasks.

    Attributes:
        progress_callback (ProgressCallback | None): A callback function for
            reporting download progress.
    """

    # Default User-Agent list. Currently only the first entry is used when
    # building request headers; the rest are kept as rotation candidates.
    DEFAULT_USER_AGENTS = [
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
    ]

    def __init__(self, progress_callback: ProgressCallback | None = None):
        """Initializes the DownloadEngine.

        Args:
            progress_callback: A callback function that will be called to
                report download progress.
        """
        self.progress_callback = progress_callback
        # Maps task IDs to their running asyncio download tasks.
        self._active_downloads: dict[UUID, asyncio.Task] = {}
        # Per-task cancellation flags set by cancel_download().
        self._cancel_flags: dict[UUID, bool] = {}
        # Capture the loop so progress hooks (called from yt-dlp worker
        # threads) can dispatch async callbacks back to it thread-safely.
        try:
            self._loop = asyncio.get_running_loop()
        except RuntimeError:
            # No running loop (engine constructed outside async context);
            # fall back to the policy's loop. NOTE(review): get_event_loop()
            # outside a running loop is deprecated in newer Python versions —
            # callers should prefer constructing the engine inside the loop.
            self._loop = asyncio.get_event_loop()

    def _write_cookies_to_file(self, cookies_str: str) -> str:
        """Writes the provided cookie string to a temporary file and returns its path.

        The file is written in Netscape cookie-file format, which is what
        yt-dlp's `cookiefile` option expects. The caller is responsible for
        deleting the returned file when done.

        Args:
            cookies_str: The cookie string (semicolon-separated `k=v` pairs).

        Returns:
            The path to the temporary cookie file.
        """
        temp_file = tempfile.NamedTemporaryFile(mode="w", delete=False, encoding="utf-8")
        try:
            temp_file.write("# Netscape HTTP Cookie File\n")
            temp_file.write("# This file was generated by swiftshadow. Do not edit.\n\n")

            # Parse cookies string
            cookie_pairs = cookies_str.split(';')
            for pair in cookie_pairs:
                if '=' in pair:
                    key, value = pair.strip().split('=', 1)
                    # The raw cookie string carries no domain information, so a
                    # generic placeholder domain is used for every entry.
                    domain = ".generic.com"
                    flag = "TRUE"
                    path = "/"
                    secure = "FALSE"
                    expiration = "0"  # Session cookie
                    temp_file.write(f"{domain}\t{flag}\t{path}\t{secure}\t{expiration}\t{key}\t{value}\n")
        finally:
            temp_file.close()
        return temp_file.name

    def _get_base_ydl_opts(self, url: str | None = None) -> dict[str, Any]:
        """Gets base yt-dlp options with proper headers and network settings.

        Args:
            url: Optional URL to customize headers (Origin/Referer).

        Returns:
            Base yt-dlp options.
        """
        # Base options
        opts: dict[str, Any] = {
            "quiet": False,  # Set to False to see detailed errors
            "no_warnings": False,
            "socket_timeout": 60,
            "retries": 5,  # Increase retry attempts
            "fragment_retries": 5,
            "file_access_retries": 3,
            "extractor_retries": 3,
            # Network configuration
            "http_chunk_size": 10485760,  # 10MB chunks
            "concurrent_fragment_downloads": 5,  # HLS/DASH fragment concurrency
        }

        # Set request headers
        headers = {
            "User-Agent": self.DEFAULT_USER_AGENTS[0],
            "Accept": "*/*",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Accept-Encoding": "gzip, deflate, br",
            "Connection": "keep-alive",
        }

        # Add specific Referer for HLS (m3u8) or other streaming if URL is provided
        if url:
            from urllib.parse import urlparse
            parsed = urlparse(url)
            origin = f"{parsed.scheme}://{parsed.netloc}"
            headers["Origin"] = origin
            headers["Referer"] = url

        opts["http_headers"] = headers

        # FFmpeg configuration (for merging video/audio, processing HLS)
        opts["postprocessor_args"] = {
            "ffmpeg": [
                "-threads", "4",  # Use 4 threads
                "-loglevel", "warning",
            ]
        }

        return opts

    def _is_url_expired_error(self, error_message: str) -> bool:
        """Detects if the error indicates URL expiration or temporary link failure.

        Args:
            error_message: The error message from yt-dlp.

        Returns:
            True if the error indicates URL expiration, False otherwise.
        """
        # Substrings (matched case-insensitively) that suggest the media URL
        # is stale or the failure is transient and worth re-extracting for.
        expiration_patterns = [
            "403 Forbidden",
            "404 Not Found",
            "410 Gone",
            "502 Bad Gateway",
            "503 Service Unavailable",
            "504 Gateway Timeout",
            "signature expired",
            "URL signature",
            "token expired",
            "access denied",
            "video unavailable",
            "this video is no longer available",
            "video has been removed",
            "temporary failure",
            "connection timed out",
            "server error 5",
            "bad gateway",
            "service unavailable",
            "gateway timeout"
        ]

        error_lower = error_message.lower()
        return any(pattern.lower() in error_lower for pattern in expiration_patterns)

    async def _refresh_url_info(self, task: DownloadTask) -> dict[str, Any] | None:
        """Re-extracts fresh URL information from the original source.

        Args:
            task: The download task with potentially expired URLs.

        Returns:
            Fresh video information or None if extraction fails.
        """
        temp_cookie_path: str | None = None
        try:
            logger.info(f"Attempting to refresh URL info for task: {task.title}")

            temp_opts = self._get_base_ydl_opts(task.url)
            temp_opts["extract_flat"] = False

            # Add cookies if available
            if task.cookies:
                decrypted_cookies = decrypt_cookies(task.cookies)
                temp_cookie_path = self._write_cookies_to_file(decrypted_cookies)
                temp_opts["cookiefile"] = temp_cookie_path

            # Extract fresh info off the event loop thread.
            fresh_info = await asyncio.to_thread(self._extract_info_sync, task.url, temp_opts)

            if fresh_info:
                logger.info(f"Successfully refreshed URL info for task: {task.title}")
                return fresh_info
            else:
                logger.warning(f"Failed to refresh URL info for task: {task.title}")
                return None

        except Exception as e:
            logger.error(f"Error refreshing URL info for task {task.title}: {e}")
            return None
        finally:
            # Remove the temporary cookie file once extraction is done
            # (mirrors extract_info's cleanup; previously this leaked).
            if temp_cookie_path and os.path.exists(temp_cookie_path):
                os.unlink(temp_cookie_path)

    async def _attempt_smart_recovery(self, task: DownloadTask, original_error: str,
                                      max_recovery_attempts: int = 3) -> bool:
        """Attempts smart recovery when download fails due to URL expiration.

        Args:
            task: The failed download task.
            original_error: The original error message.
            max_recovery_attempts: Maximum number of recovery attempts.

        Returns:
            True if recovery was successful, False otherwise.
        """
        if not self._is_url_expired_error(original_error):
            logger.debug(f"Error does not indicate URL expiration for task {task.title}: {original_error}")
            return False

        logger.info(f"Detected URL expiration for task {task.title}, attempting smart recovery...")

        for attempt in range(1, max_recovery_attempts + 1):
            try:
                logger.info(f"Recovery attempt {attempt}/{max_recovery_attempts} for task: {task.title}")

                # Wait with exponential backoff (capped at 30s).
                if attempt > 1:
                    wait_time = min(2 ** (attempt - 1), 30)
                    logger.info(f"Waiting {wait_time} seconds before recovery attempt {attempt}")
                    await asyncio.sleep(wait_time)

                # Try to refresh URL information
                fresh_info = await self._refresh_url_info(task)
                if not fresh_info:
                    logger.warning(
                        f"Recovery attempt {attempt} failed: Could not refresh URL info for task {task.title}")
                    continue

                # Build new yt-dlp options with fresh info
                ydl_opts = await self._build_ydl_options(task)

                # Attempt download with refreshed info.
                # BUG FIX: _download_sync returns a (bool, error) tuple; the
                # previous code assigned the whole tuple, which is always
                # truthy, so failed retries were reported as successes.
                logger.info(f"Attempting download with refreshed URLs for task: {task.title}")
                success, recovery_error = await asyncio.to_thread(self._download_sync, task, ydl_opts)

                if success:
                    logger.info(f"Smart recovery successful for task {task.title} on attempt {attempt}")
                    return True
                else:
                    logger.warning(
                        f"Recovery attempt {attempt} failed for task {task.title}: {recovery_error or 'unknown error'}")

            except Exception as e:
                logger.error(f"Recovery attempt {attempt} failed with exception for task {task.title}: {e}")

        logger.error(f"All {max_recovery_attempts} recovery attempts failed for task: {task.title}")
        return False

    async def extract_info(self, url: str, cookies: str | None = None) -> dict[str, Any]:
        """Extracts information about a given URL without downloading.

        Args:
            url: The URL to extract information from.
            cookies: Cookie string for authentication (optional). May be a
                path to a Netscape cookie file or a raw cookie string.

        Returns:
            A dictionary containing the extracted information.

        Raises:
            Exception: If extraction fails after retries.
        """
        logger.info(f"Extracting info for URL: {url}")

        # Use base configuration
        ydl_opts = self._get_base_ydl_opts(url)
        ydl_opts["extract_flat"] = False
        ydl_opts["force_generic_extractor"] = False

        # Add Cookie: Supports passing Netscape cookie file path or raw cookie string
        temp_cookie_path = None
        if cookies:
            try:
                if os.path.isfile(cookies):
                    ydl_opts["cookiefile"] = cookies
                else:
                    temp_cookie_path = self._write_cookies_to_file(cookies)
                    ydl_opts["cookiefile"] = temp_cookie_path
            except Exception as e:
                # Best-effort: fall through without cookies rather than fail.
                logger.warning(f"Cookies processing failed: {e}")

        try:
            info = await asyncio.to_thread(self._extract_info_sync, url, ydl_opts)

            if info:
                logger.success(f"Successfully extracted info for URL: {url}")
                return info
            else:
                error_msg = f"Could not get information for {url}"
                logger.error(error_msg)
                raise Exception(error_msg)

        except Exception as e:
            logger.error(f"Error extracting info for URL {url}: {e}")
            # Chain the original exception so callers keep the root cause.
            raise Exception(f"Failed to extract information: {e}") from e
        finally:
            if temp_cookie_path and os.path.exists(temp_cookie_path):
                os.unlink(temp_cookie_path)

    async def get_formats(self, url: str) -> list[dict[str, Any]]:
        """Retrieves available formats for a given URL.

        Args:
            url: The URL to retrieve formats from.

        Returns:
            A list of dictionaries, each representing an available format.
            Returns an empty list on failure.
        """
        try:
            logger.info(f"Getting formats for URL: {url}")

            ydl_opts = self._get_base_ydl_opts(url)
            ydl_opts["extract_flat"] = False  # Full format information is required

            info = await asyncio.to_thread(self._extract_info_sync, url, ydl_opts)

            if info and "formats" in info:
                formats = info["formats"]
                logger.success(f"Successfully retrieved {len(formats)} formats for URL: {url}")
                return formats
            else:
                logger.warning(f"No formats found for URL: {url}")
                return []

        except Exception as e:
            logger.error(f"Error getting formats for URL {url}: {e}")
            return []

    def _extract_info_sync(self, url: str, ydl_opts: dict[str, Any]) -> dict[str, Any] | None:
        """Synchronously extracts information about a URL using yt-dlp.

        Args:
            url: The URL to extract information from.
            ydl_opts: yt-dlp options for extraction.

        Returns:
            Extracted information (with a `file_type` key added) or None.

        Raises:
            DownloadError: If yt-dlp extraction fails.
            Exception: On any other unexpected error.
        """
        try:
            logger.debug(f"Starting yt-dlp extraction for: {url}")
            logger.debug(f"yt-dlp options: {ydl_opts}")

            with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                info = ydl.extract_info(url, download=False)

            if info is None:
                logger.error(f"No information found for {url}")
                return None

            # Determine file type from yt-dlp's `_type`/`ext` fields.
            _file_type = info.get('_type') or info.get('ext')
            if _file_type and _file_type.lower() in [ft.value for ft in FileType]:
                info['file_type'] = FileType(_file_type.lower())
            else:
                # Infer type based on URL or format
                if '.m3u8' in url.lower() or info.get('protocol') == 'm3u8':
                    info['file_type'] = FileType.VIDEO
                else:
                    info['file_type'] = FileType.OTHER

            logger.debug(f"Extracted info for {url}: title={info.get('title')}, file_type={info.get('file_type')}")
            return info

        except DownloadError as e:
            logger.error(f"yt-dlp info extraction failed for {url}: {e}")
            raise
        except Exception as e:
            logger.error(f"Unexpected error during info extraction for {url}: {e}")
            raise

    def _progress_hook(self, d: dict[str, Any]) -> None:
        """Progress hook for yt-dlp to report download progress.

        Args:
            d: The progress dictionary from yt-dlp.
        """
        # The old implementation relied on yt-dlp injecting task_id into the progress dictionary, but it doesn't.
        # This method is kept for compatibility but is no longer directly used. Please use _make_progress_hook instead.
        logger.debug("_progress_hook called but no bound task_id; use _make_progress_hook instead.")
        return

    def _make_progress_hook(self, task_id: str) -> Callable[[dict[str, Any]], None]:
        """Creates a yt-dlp progress hook bound to a specific task_id.

        This ensures progress updates are dispatched to the event loop even when
        called from yt-dlp's worker thread.

        Args:
            task_id: The task ID to bind (string form of the task UUID).

        Returns:
            The progress hook function.
        """

        def hook(d: dict[str, Any]) -> None:
            # Nothing to do if no consumer registered a callback.
            if self.progress_callback is None:
                return

            # Status and progress parsing
            status = TaskStatus.DOWNLOADING
            progress = None
            speed = None
            eta = None
            error_message = None

            try:
                if d.get("status") == "downloading":
                    # Prefer the exact total; fall back to yt-dlp's estimate.
                    if d.get("total_bytes") and d.get("downloaded_bytes"):
                        progress = d["downloaded_bytes"] / d["total_bytes"] * 100
                    elif d.get("total_bytes_estimate") and d.get("downloaded_bytes"):
                        progress = d["downloaded_bytes"] / d["total_bytes_estimate"] * 100
                    speed = d.get("speed")
                    eta = d.get("eta")
                elif d.get("status") == "finished":
                    status = TaskStatus.COMPLETED
                    progress = 100.0
                elif d.get("status") == "error":
                    status = TaskStatus.FAILED
                    error_message = d.get("error")
            except Exception as e:
                logger.warning(f"Progress hook parsing error: {e}")

            # Dispatch asynchronous callback to the main event loop to avoid unexecuted coroutines when called from within the thread
            try:
                if inspect.iscoroutinefunction(self.progress_callback):
                    asyncio.run_coroutine_threadsafe(
                        self.progress_callback(UUID(task_id), status, progress, speed, eta, error_message,
                                               d.get("filename")),
                        self._loop,
                    )
                else:
                    # Synchronous callback is called directly
                    self.progress_callback(UUID(task_id), status, progress, speed, eta, error_message,
                                           d.get("filename"))
            except Exception as e:
                logger.error(f"Dispatch progress callback failed: {e}")

        return hook

    async def _build_ydl_options(self, task: DownloadTask) -> dict[str, Any]:
        """Builds yt-dlp options based on the download task.

        Args:
            task: The download task.

        Returns:
            A dictionary of yt-dlp options.
        """
        # Start with base configuration
        ydl_opts = self._get_base_ydl_opts(task.url)

        # Ensure save directory exists
        try:
            out_path = Path(task.save_path)
            out_path.parent.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            logger.warning(f"Failed to ensure output directory for {task.title}: {e}")

        # Download configuration
        ydl_opts.update({
            "format": task.format_id if task.format_id else "bestvideo+bestaudio/best",
            "outtmpl": str(task.save_path),
            # Bind task_id to the progress hook to ensure status updates.
            # str() guards against task.id being a UUID object: the hook
            # calls UUID(task_id), which raises if handed a UUID directly.
            "progress_hooks": [self._make_progress_hook(str(task.id))],
            "noplaylist": True,
            "noprogress": False,  # Enable progress display
        })

        # Cookie configuration
        if task.cookies:
            decrypted_cookies = decrypt_cookies(task.cookies)
            temp_cookie_path = self._write_cookies_to_file(decrypted_cookies)
            # NOTE(review): this temp file must outlive the download, so it
            # cannot be deleted here; consider cleanup after download completes.
            ydl_opts["cookiefile"] = temp_cookie_path

        # File type specific configuration
        self._set_postprocessor_opts(task, ydl_opts)

        return ydl_opts

    def _set_postprocessor_opts(self, task: DownloadTask, ydl_opts: dict[str, Any]) -> None:
        """Sets post-processor options based on the task's file type.

        Args:
            task: The download task.
            ydl_opts: The yt-dlp configuration options (mutated in place).
        """
        # For audio extraction
        if task.file_type == FileType.AUDIO:
            if "postprocessors" not in ydl_opts:
                ydl_opts["postprocessors"] = []
            ydl_opts["postprocessors"].append({
                'key': 'FFmpegExtractAudio',
                'preferredcodec': 'm4a',
                'preferredquality': '192',
            })

        # FFmpeg -i parameter configuration for HLS (m3u8): protocol_whitelist and custom headers
        try:
            is_hls = False
            if isinstance(task.url, str) and 'm3u8' in task.url.lower():
                is_hls = True

            if is_hls:
                # Construct headers: merge base http_headers with task.custom_headers
                base_headers: dict[str, str] = ydl_opts.get("http_headers", {}) or {}
                custom_headers: dict[str, str] = task.custom_headers or {}
                merged = {**base_headers, **custom_headers}

                header_lines = [f"{k}: {v}" for k, v in merged.items()]
                header_string = "\r\n".join(header_lines)

                ffmpeg_i_args = [
                    "-protocol_whitelist", "file,http,https,tcp,tls,crypto",
                    "-headers", header_string,
                ]

                if "postprocessor_args" not in ydl_opts:
                    ydl_opts["postprocessor_args"] = {}
                # ffmpeg -i parameters (checked in tests)
                ydl_opts["postprocessor_args"]["ffmpeg_i"] = ffmpeg_i_args
        except Exception as e:
            # Best-effort: a malformed header set should not abort the download.
            logger.warning(f"Failed to set HLS ffmpeg_i args: {e}")

    def cancel_download(self, task_id: UUID) -> None:
        """Cancels a running download task.

        Args:
            task_id: The ID of the task to cancel.
        """
        if task_id in self._active_downloads:
            logger.info(f"Attempting to cancel download for task ID: {task_id}")
            self._cancel_flags[task_id] = True
            self._active_downloads[task_id].cancel()
        else:
            logger.warning(f"No active download found for task ID: {task_id}")

    def get_active_downloads(self) -> dict[UUID, asyncio.Task]:
        """Returns a dictionary of active download tasks.

        Returns:
            A dictionary where keys are task IDs and values are asyncio.Task objects.
        """
        return self._active_downloads

    async def download(self, task: DownloadTask) -> bool:
        """Initiates the download process for a given task.

        Args:
            task: The download task to execute.

        Returns:
            True if the download was successful, False otherwise.
        """
        try:
            logger.info(f"Starting download for task: {task.title}")
            ydl_opts = await self._build_ydl_options(task)
            success, err_msg = await asyncio.to_thread(self._download_sync, task, ydl_opts)

            if success:
                logger.success(f"Download completed successfully for: {task.title}")
            else:
                logger.error(f"Download failed for: {task.title} - {err_msg or 'unknown error'}")
                # Smart recovery: handle URL expiration/temporary errors (e.g., 502)
                if err_msg and self._is_url_expired_error(err_msg):
                    recovered = await self._attempt_smart_recovery(task, err_msg)
                    if recovered:
                        return True
                    else:
                        logger.error(f"Smart recovery failed for: {task.title}")

            return success
        except Exception as e:
            logger.error(f"Error during download for task {task.title}: {e}")
            return False

    def _download_sync(self, task: DownloadTask, ydl_opts: dict) -> tuple[bool, str | None]:
        """Synchronously downloads the file.

        Args:
            task: The download task to execute.
            ydl_opts: yt-dlp options for download.

        Returns:
            (True, None) if successful, otherwise (False, error_message).
        """
        try:
            logger.info(f"Executing yt-dlp download for: {task.title}")
            with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                ydl.download([task.url])
            logger.success(f"yt-dlp download completed for {task.title}")
            return True, None
        except DownloadError as e:
            logger.error(f"yt-dlp download failed for {task.url}: {e}")
            return False, str(e)
        except Exception as e:
            logger.error(f"Unexpected error during download for {task.url}: {e}")
            return False, str(e)
