"""Download Manager for system update packages.

Provides reliable HTTP/HTTPS download with resume support, progress reporting,
and integrity verification.
"""

import os
import json
import asyncio
import hashlib
import logging
import time
from pathlib import Path
from typing import Optional, Dict, Callable, Any, Tuple
from dataclasses import dataclass, asdict
from enum import Enum
import aiohttp
import aiofiles
from datetime import datetime


logger = logging.getLogger(__name__)


class DownloadState(Enum):
    """Download state enumeration.

    Normal lifecycle: IDLE -> PREPARING -> DOWNLOADING -> VERIFYING ->
    COMPLETE. PAUSED, FAILED, and CANCELLED are interruption states.
    Values are plain strings because they are persisted to downloads.json.
    """
    IDLE = "IDLE"                # Known but not started (or reset after restart)
    PREPARING = "PREPARING"      # Probing server for size / resume support
    DOWNLOADING = "DOWNLOADING"  # Actively streaming chunks to disk
    PAUSED = "PAUSED"            # User-paused; partial file kept for resume
    VERIFYING = "VERIFYING"      # Checksum verification in progress
    COMPLETE = "COMPLETE"        # Verified and moved to final path
    FAILED = "FAILED"            # Aborted with an error (see DownloadInfo.error)
    CANCELLED = "CANCELLED"      # Explicitly cancelled by the user


class DownloadError(Exception):
    """Raised when a download cannot be started, completed, or verified."""


@dataclass
class DownloadInfo:
    """Download information and metadata.

    Persisted to downloads.json via to_dict()/from_dict() so that
    interrupted downloads can be resumed after a restart.
    """
    url: str
    filename: str
    total_size: int           # Expected size in bytes (0 if unknown)
    downloaded_size: int      # Bytes written to the partial file so far
    checksum: Optional[str] = None
    checksum_type: str = "sha256"
    state: DownloadState = DownloadState.IDLE
    start_time: Optional[float] = None   # Epoch seconds when transfer began
    end_time: Optional[float] = None     # Epoch seconds when transfer completed
    error: Optional[str] = None          # Last error message, if any
    temp_path: Optional[str] = None      # Partial-file location on disk
    final_path: Optional[str] = None     # Destination after verification

    def to_dict(self) -> Dict:
        """Convert to a JSON-serializable dictionary."""
        data = asdict(self)
        # Enum members are not JSON-serializable; store the string value.
        data['state'] = self.state.value
        return data

    @classmethod
    def from_dict(cls, data: Dict) -> 'DownloadInfo':
        """Create an instance from a dictionary produced by to_dict().

        The input mapping is copied first so the caller's dict is not
        mutated (the previous implementation rewrote data['state'] in
        place as a side effect).
        """
        data = dict(data)
        data['state'] = DownloadState(data['state'])
        return cls(**data)


@dataclass
class DownloadProgress:
    """Snapshot of a download's progress at a single point in time."""
    state: DownloadState
    progress_percent: float
    downloaded_bytes: int
    total_bytes: int
    speed_bps: float  # Bytes per second
    eta_seconds: Optional[int]
    elapsed_seconds: float

    @property
    def speed_mbps(self) -> float:
        """Download speed expressed in megabytes per second."""
        return self.speed_bps / (1024 * 1024)

    @property
    def eta_formatted(self) -> str:
        """Human-readable ETA: ``HH:MM:SS``, ``MM:SS``, or ``"Unknown"``."""
        if self.eta_seconds is None:
            return "Unknown"

        # Split total seconds into hour/minute/second components.
        total_minutes, secs = divmod(self.eta_seconds, 60)
        hours, mins = divmod(total_minutes, 60)

        if hours > 0:
            return f"{hours:02d}:{mins:02d}:{secs:02d}"
        return f"{mins:02d}:{secs:02d}"


class DownloadManager:
    """Manages file downloads with resume support and progress tracking.

    Files are streamed to ``<cache_dir>/<filename>.<id>.partial`` and
    renamed to ``<cache_dir>/<filename>`` after optional checksum
    verification. Download metadata is persisted to ``downloads.json``
    inside the cache directory so interrupted transfers can be resumed
    across restarts.
    """
    
    # Default configuration
    DEFAULT_CHUNK_SIZE = 8192  # 8 KiB chunks
    DEFAULT_TIMEOUT = 30  # seconds
    DEFAULT_MAX_RETRIES = 3
    DEFAULT_CACHE_DIR = "/var/cache/cloud-printer/downloads"
    
    # Persist state to disk every N chunks during a transfer. The old
    # check (downloaded_size % (chunk_size * 100) == 0) almost never
    # fired because iter_chunked may yield partial chunks.
    STATE_SAVE_CHUNK_INTERVAL = 100
    
    def __init__(self, 
                 cache_dir: Optional[str] = None,
                 chunk_size: int = DEFAULT_CHUNK_SIZE,
                 timeout: int = DEFAULT_TIMEOUT,
                 max_retries: int = DEFAULT_MAX_RETRIES,
                 max_bandwidth: Optional[int] = None):
        """Initialize download manager.
        
        Args:
            cache_dir: Directory for temporary download files
            chunk_size: Size of download chunks in bytes
            timeout: Request timeout in seconds
            max_retries: Maximum retry attempts
                (NOTE(review): stored but not yet consumed by the
                download loop — retries are not implemented here)
            max_bandwidth: Maximum bandwidth in bytes/sec (None for unlimited)
        """
        self.cache_dir = Path(cache_dir or self.DEFAULT_CACHE_DIR)
        self.chunk_size = chunk_size
        self.timeout = aiohttp.ClientTimeout(total=timeout)
        self.max_retries = max_retries
        self.max_bandwidth = max_bandwidth
        
        # Per-download tracking, all keyed by download ID.
        self.downloads: Dict[str, DownloadInfo] = {}
        self.sessions: Dict[str, aiohttp.ClientSession] = {}
        self.tasks: Dict[str, asyncio.Task] = {}
        self.progress_callbacks: Dict[str, Callable] = {}
        
        # Sliding-window samples of (timestamp, cumulative bytes) used by
        # _calculate_speed().
        self.speed_samples: Dict[str, list] = {}
        self.speed_window = 5  # seconds
        
        # Ensure cache directory exists and reload any persisted state.
        self._ensure_cache_dir()
    
    def _ensure_cache_dir(self):
        """Ensure cache directory exists and reload persisted download states."""
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        
        # Load any existing download states
        state_file = self.cache_dir / "downloads.json"
        if state_file.exists():
            try:
                with open(state_file, 'r') as f:
                    states = json.load(f)
                    for download_id, info_dict in states.items():
                        self.downloads[download_id] = DownloadInfo.from_dict(info_dict)
                        # A download that was mid-transfer when the process
                        # died can no longer be running; reset it to IDLE so
                        # it can be restarted (and resumed from the partial).
                        if self.downloads[download_id].state == DownloadState.DOWNLOADING:
                            self.downloads[download_id].state = DownloadState.IDLE
            except Exception as e:
                # Best-effort: a corrupt state file must not prevent startup.
                logger.error(f"Failed to load download states: {e}")
    
    def _save_states(self):
        """Persist all download states to downloads.json (best-effort)."""
        state_file = self.cache_dir / "downloads.json"
        try:
            states = {
                download_id: info.to_dict()
                for download_id, info in self.downloads.items()
            }
            with open(state_file, 'w') as f:
                json.dump(states, f, indent=2)
        except Exception as e:
            logger.error(f"Failed to save download states: {e}")
    
    def _get_temp_path(self, filename: str, download_id: str) -> Path:
        """Get temporary (partial) file path for a download.
        
        The download ID is embedded so distinct downloads of the same
        filename cannot clobber each other's partial data.
        """
        # BUG FIX: previously used a literal placeholder instead of the
        # actual filename, so every download shared the same partial file.
        return self.cache_dir / f"{filename}.{download_id}.partial"
    
    def _get_final_path(self, filename: str) -> Path:
        """Get final file path for a completed download."""
        return self.cache_dir / filename
    
    async def _check_resume_support(self, session: aiohttp.ClientSession, url: str) -> Tuple[bool, int]:
        """Check if the server supports byte-range resume and get file size.
        
        Issues a HEAD request; failures are non-fatal and simply disable
        resume for this transfer.
        
        Returns:
            Tuple of (supports_resume, total_size); total_size is 0 when
            the server does not report Content-Length.
        """
        try:
            async with session.head(url) as response:
                response.raise_for_status()
                
                # Range support is advertised via Accept-Ranges: bytes.
                accept_ranges = response.headers.get('Accept-Ranges', '').lower()
                supports_resume = accept_ranges == 'bytes'
                
                content_length = response.headers.get('Content-Length')
                total_size = int(content_length) if content_length else 0
                
                return supports_resume, total_size
                
        except Exception as e:
            logger.warning(f"Failed to check resume support: {e}")
            return False, 0
    
    def _calculate_speed(self, download_id: str, bytes_downloaded: int, timestamp: float) -> float:
        """Calculate download speed (bytes/sec) over a sliding time window.
        
        Args:
            download_id: Download being measured
            bytes_downloaded: Cumulative bytes downloaded so far
            timestamp: Current time (epoch seconds)
        """
        if download_id not in self.speed_samples:
            self.speed_samples[download_id] = []
        
        samples = self.speed_samples[download_id]
        samples.append((timestamp, bytes_downloaded))
        
        # Drop samples older than the averaging window.
        cutoff_time = timestamp - self.speed_window
        samples = [(t, b) for t, b in samples if t > cutoff_time]
        self.speed_samples[download_id] = samples
        
        # Need at least two samples to measure a rate.
        if len(samples) < 2:
            return 0.0
        
        # Average rate between the oldest and newest retained samples.
        time_diff = samples[-1][0] - samples[0][0]
        bytes_diff = samples[-1][1] - samples[0][1]
        
        if time_diff > 0:
            return bytes_diff / time_diff
        return 0.0
    
    def _calculate_eta(self, remaining_bytes: int, speed_bps: float) -> Optional[int]:
        """Estimate remaining seconds, or None when speed is zero/unknown."""
        if speed_bps > 0:
            return int(remaining_bytes / speed_bps)
        return None
    
    async def _apply_bandwidth_limit(self, chunk_size: int, start_time: float):
        """Sleep as needed so throughput stays under max_bandwidth.
        
        Args:
            chunk_size: Bytes just transferred
            start_time: Epoch time when the chunk transfer began
        """
        if self.max_bandwidth is None:
            return
        
        # At the configured rate this chunk should take at least this long.
        expected_duration = chunk_size / self.max_bandwidth
        actual_duration = time.time() - start_time
        
        # Sleep off the difference if we're ahead of schedule.
        if actual_duration < expected_duration:
            await asyncio.sleep(expected_duration - actual_duration)
    
    async def _download_file(self, download_id: str):
        """Internal download implementation: stream, verify, finalize.
        
        Runs as an asyncio task. State transitions and errors are recorded
        on the DownloadInfo and persisted; exceptions never escape.
        """
        info = self.downloads[download_id]
        
        try:
            # One session per transfer so cancel/pause can close it cleanly.
            session = aiohttp.ClientSession(timeout=self.timeout)
            self.sessions[download_id] = session
            
            info.state = DownloadState.PREPARING
            self._save_states()
            
            # Probe the server for size and byte-range support.
            supports_resume, total_size = await self._check_resume_support(session, info.url)
            
            if total_size > 0:
                info.total_size = total_size
            
            # Setup paths
            temp_path = self._get_temp_path(info.filename, download_id)
            final_path = self._get_final_path(info.filename)
            info.temp_path = str(temp_path)
            info.final_path = str(final_path)
            
            # Resume from an existing partial file when the server allows it.
            start_byte = 0
            if temp_path.exists() and supports_resume:
                start_byte = temp_path.stat().st_size
                logger.info(f"Resuming download from byte {start_byte}")
            # Keep the counter in sync with reality: when not resuming the
            # partial is truncated below, so any stale persisted count must
            # be reset to zero.
            info.downloaded_size = start_byte
            
            headers = {}
            if start_byte > 0 and supports_resume:
                headers['Range'] = f'bytes={start_byte}-'
            
            info.state = DownloadState.DOWNLOADING
            info.start_time = time.time()
            self._save_states()
            
            # Stream the body to disk with progress tracking.
            async with session.get(info.url, headers=headers) as response:
                response.raise_for_status()
                
                # Fall back to this response's Content-Length when the HEAD
                # probe did not provide a size (add start_byte because a
                # ranged response only reports the remaining bytes).
                if info.total_size == 0:
                    content_length = response.headers.get('Content-Length')
                    if content_length:
                        info.total_size = int(content_length) + start_byte
                
                # Append when resuming; otherwise truncate and start over.
                mode = 'ab' if start_byte > 0 else 'wb'
                async with aiofiles.open(temp_path, mode) as file:
                    
                    chunks_since_save = 0
                    async for chunk in response.content.iter_chunked(self.chunk_size):
                        # pause_download()/cancel_download() flip the state;
                        # stop pulling chunks as soon as that happens.
                        if info.state != DownloadState.DOWNLOADING:
                            break
                        
                        chunk_start = time.time()
                        
                        await file.write(chunk)
                        info.downloaded_size += len(chunk)
                        
                        # Throttle if a bandwidth cap is configured.
                        await self._apply_bandwidth_limit(len(chunk), chunk_start)
                        
                        progress = self._create_progress(download_id)
                        
                        # Notify the registered callback; accept both sync
                        # and async callables. Callback errors are logged,
                        # never allowed to kill the transfer.
                        if download_id in self.progress_callbacks:
                            callback = self.progress_callbacks[download_id]
                            try:
                                result = callback(progress)
                                if asyncio.iscoroutine(result):
                                    await result
                            except Exception as e:
                                logger.error(f"Progress callback error: {e}")
                        
                        # Periodic persistence so a crash loses little progress.
                        chunks_since_save += 1
                        if chunks_since_save >= self.STATE_SAVE_CHUNK_INTERVAL:
                            self._save_states()
                            chunks_since_save = 0
            
            # Only finalize if the loop ran to completion (not paused/cancelled).
            if info.state == DownloadState.DOWNLOADING:
                info.state = DownloadState.VERIFYING
                self._save_states()
                
                # Verify integrity if a checksum was provided.
                if info.checksum:
                    if await self._verify_checksum(temp_path, info.checksum, info.checksum_type):
                        logger.info(f"Checksum verification passed for {download_id}")
                    else:
                        raise DownloadError("Checksum verification failed")
                
                # Atomically move into the final location.
                temp_path.rename(final_path)
                info.state = DownloadState.COMPLETE
                info.end_time = time.time()
                logger.info(f"Download completed: {download_id}")
            
        except asyncio.CancelledError:
            # pause_download() sets PAUSED and then cancels this task;
            # preserve that state so the download remains resumable instead
            # of clobbering it with CANCELLED.
            if info.state != DownloadState.PAUSED:
                info.state = DownloadState.CANCELLED
                info.error = "Download cancelled"
                logger.info(f"Download cancelled: {download_id}")
            
        except Exception as e:
            info.state = DownloadState.FAILED
            info.error = str(e)
            logger.error(f"Download failed: {download_id} - {e}")
            
        finally:
            # Always release per-download resources and persist final state.
            if download_id in self.sessions:
                await self.sessions[download_id].close()
                del self.sessions[download_id]
            
            if download_id in self.tasks:
                del self.tasks[download_id]
            
            if download_id in self.speed_samples:
                del self.speed_samples[download_id]
            
            self._save_states()
    
    async def _verify_checksum(self, file_path: Path, expected: str, algorithm: str = "sha256") -> bool:
        """Verify a file's checksum by streaming it in chunks.
        
        Args:
            file_path: File to hash
            expected: Expected hex digest (case-insensitive)
            algorithm: Any algorithm accepted by hashlib.new
        
        Returns:
            True if the digest matches; False on mismatch or read error.
        """
        try:
            hasher = hashlib.new(algorithm)
            
            async with aiofiles.open(file_path, 'rb') as f:
                while chunk := await f.read(self.chunk_size):
                    hasher.update(chunk)
            
            calculated = hasher.hexdigest()
            return calculated.lower() == expected.lower()
            
        except Exception as e:
            logger.error(f"Checksum verification error: {e}")
            return False
    
    def _create_progress(self, download_id: str) -> DownloadProgress:
        """Build a DownloadProgress snapshot for the given download."""
        info = self.downloads[download_id]
        
        current_time = time.time()
        elapsed = current_time - info.start_time if info.start_time else 0
        
        speed = self._calculate_speed(download_id, info.downloaded_size, current_time)
        
        progress_percent = 0
        if info.total_size > 0:
            progress_percent = (info.downloaded_size / info.total_size) * 100
        
        # Clamp so an unknown/underreported total never yields a negative ETA.
        remaining = max(info.total_size - info.downloaded_size, 0)
        eta = self._calculate_eta(remaining, speed)
        
        return DownloadProgress(
            state=info.state,
            progress_percent=progress_percent,
            downloaded_bytes=info.downloaded_size,
            total_bytes=info.total_size,
            speed_bps=speed,
            eta_seconds=eta,
            elapsed_seconds=elapsed
        )
    
    async def start_download(self, 
                            url: str,
                            filename: Optional[str] = None,
                            checksum: Optional[str] = None,
                            checksum_type: str = "sha256",
                            progress_callback: Optional[Callable] = None) -> str:
        """Start a new download or resume an existing one.
        
        Args:
            url: URL to download from
            filename: Target filename (extracted from URL if not provided)
            checksum: Expected checksum for verification
            checksum_type: Type of checksum (md5, sha1, sha256)
            progress_callback: Callback for progress updates; may be a
                plain function or an async coroutine function
            
        Returns:
            Download ID for tracking
        """
        # Derive a filename from the URL when none is given.
        if not filename:
            filename = url.split('/')[-1] or "download"
        
        # Stable ID derived from url+filename (MD5 used as a cheap
        # fingerprint here, not for security). BUG FIX: previously hashed
        # a literal placeholder instead of the filename, so the same URL
        # saved under different names collided on one ID.
        download_id = hashlib.md5(f"{url}:{filename}".encode()).hexdigest()[:12]
        
        # Idempotent: an in-flight task for this ID is reused.
        if download_id in self.tasks:
            logger.info(f"Download already in progress: {download_id}")
            return download_id
        
        # Create new tracking info, or reuse existing info for a resume.
        if download_id not in self.downloads:
            self.downloads[download_id] = DownloadInfo(
                url=url,
                filename=filename,
                total_size=0,
                downloaded_size=0,
                checksum=checksum,
                checksum_type=checksum_type
            )
        else:
            info = self.downloads[download_id]
            if info.state in [DownloadState.COMPLETE]:
                logger.info(f"Download already complete: {download_id}")
                return download_id
        
        if progress_callback:
            self.progress_callbacks[download_id] = progress_callback
        
        # Launch the transfer in the background.
        task = asyncio.create_task(self._download_file(download_id))
        self.tasks[download_id] = task
        
        logger.info(f"Started download: {download_id} - {url}")
        return download_id
    
    async def pause_download(self, download_id: str):
        """Pause an active download, keeping its partial file for resume.
        
        Raises:
            DownloadError: If the download ID is unknown.
        """
        if download_id not in self.downloads:
            raise DownloadError(f"Download not found: {download_id}")
        
        info = self.downloads[download_id]
        if info.state == DownloadState.DOWNLOADING:
            # Setting PAUSED first lets the download loop (and the
            # CancelledError handler) distinguish pause from cancel.
            info.state = DownloadState.PAUSED
            self._save_states()
            
            if download_id in self.tasks:
                self.tasks[download_id].cancel()
            
            logger.info(f"Paused download: {download_id}")
    
    async def resume_download(self, download_id: str):
        """Resume a paused, failed, or idle download.
        
        Raises:
            DownloadError: If the download ID is unknown.
        """
        if download_id not in self.downloads:
            raise DownloadError(f"Download not found: {download_id}")
        
        info = self.downloads[download_id]
        if info.state in [DownloadState.PAUSED, DownloadState.FAILED, DownloadState.IDLE]:
            # Restart the transfer; _download_file resumes from the
            # partial file when the server supports byte ranges.
            task = asyncio.create_task(self._download_file(download_id))
            self.tasks[download_id] = task
            
            logger.info(f"Resumed download: {download_id}")
    
    async def cancel_download(self, download_id: str, cleanup: bool = True):
        """Cancel a download and optionally delete its partial file.
        
        Raises:
            DownloadError: If the download ID is unknown.
        """
        if download_id not in self.downloads:
            raise DownloadError(f"Download not found: {download_id}")
        
        info = self.downloads[download_id]
        info.state = DownloadState.CANCELLED
        self._save_states()
        
        if download_id in self.tasks:
            self.tasks[download_id].cancel()
        
        # Remove the partial file if requested.
        if cleanup and info.temp_path:
            temp_path = Path(info.temp_path)
            if temp_path.exists():
                temp_path.unlink()
                logger.info(f"Deleted partial file: {temp_path}")
        
        logger.info(f"Cancelled download: {download_id}")
    
    def get_download_info(self, download_id: str) -> Optional[DownloadInfo]:
        """Get download information, or None if the ID is unknown."""
        return self.downloads.get(download_id)
    
    def get_download_progress(self, download_id: str) -> Optional[DownloadProgress]:
        """Get a current progress snapshot, or None if the ID is unknown."""
        if download_id not in self.downloads:
            return None
        
        return self._create_progress(download_id)
    
    def list_downloads(self) -> Dict[str, DownloadInfo]:
        """Return a shallow copy of all tracked downloads keyed by ID."""
        return self.downloads.copy()
    
    async def cleanup_completed(self, older_than_hours: int = 24):
        """Delete completed downloads older than the given age.
        
        Removes both the downloaded file and the tracking entry.
        """
        cutoff_time = time.time() - (older_than_hours * 3600)
        
        for download_id, info in list(self.downloads.items()):
            if info.state == DownloadState.COMPLETE and info.end_time:
                if info.end_time < cutoff_time:
                    # Delete the finished file from the cache.
                    if info.final_path:
                        final_path = Path(info.final_path)
                        if final_path.exists():
                            final_path.unlink()
                            logger.info(f"Deleted old download: {final_path}")
                    
                    # Remove from tracking
                    del self.downloads[download_id]
        
        self._save_states()