"""
Backup and restore functionality for CodeMCP storage layer.

Provides comprehensive backup and restore operations for call trees,
analysis results, and all related data with integrity verification.
"""

import asyncio
import gzip
import hashlib
import json
import os
import shutil
import sqlite3
import tarfile
import tempfile
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, Any, List, Optional, Union, IO

from .database import DatabaseManager
from .models import CallTreeModel, CallTreeNodeModel, AnalysisResultModel, FileChangeModel
from .serialization import CallTreeSerializer
from ..core.config import Config
from ..core.error_handler import CodeMCPError, log_info, log_warning, log_debug, handle_error


class BackupError(CodeMCPError):
    """Raised when a backup, restore, or backup-management operation fails."""

    def __init__(self, message: str, operation: str = None, **kwargs):
        # Every backup failure carries the shared BACKUP_ERROR code; the name
        # of the failing operation, when provided, is kept in the details map.
        super().__init__(message, "BACKUP_ERROR", **kwargs)
        if operation:
            self.details["operation"] = operation


class BackupManager:
    """
    Comprehensive backup and restore manager for CodeMCP data.
    
    Supports multiple backup formats and provides integrity verification,
    incremental backups, and restore operations.
    """
    
    def __init__(self, config: Config = None, db_manager: DatabaseManager = None):
        """Set up the backup directory, optional serializer, and counters.

        Args:
            config: Project configuration; a default Config() is created
                when none is supplied.
            db_manager: Database manager; required for call-tree
                serialization and database backups.
        """
        self.config = config or Config()
        self.db_manager = db_manager

        # All backup archives are kept under ~/.codemcp/backups.
        self.backup_dir = Path.home() / ".codemcp" / "backups"
        self.backup_dir.mkdir(parents=True, exist_ok=True)

        # Call-tree export/import needs a database manager behind it.
        self.serializer = CallTreeSerializer(db_manager) if db_manager else None

        # Running counters surfaced by get_backup_stats().
        self.stats = {
            "backups_created": 0,
            "backups_restored": 0,
            "backup_failures": 0,
            "restore_failures": 0,
            "last_backup": None,
            "last_restore": None
        }

        log_info(f"Backup manager initialized (backup dir: {self.backup_dir})")
    
    def _generate_backup_filename(self, backup_type: str = "full", 
                                 timestamp: datetime = None) -> str:
        """Generate backup filename with timestamp."""
        if timestamp is None:
            timestamp = datetime.utcnow()
        
        timestamp_str = timestamp.strftime("%Y%m%d_%H%M%S")
        return f"codemcp_{backup_type}_backup_{timestamp_str}.tar.gz"
    
    def _calculate_file_hash(self, file_path: Path) -> str:
        """Calculate SHA-256 hash of a file."""
        hash_sha256 = hashlib.sha256()
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_sha256.update(chunk)
        return hash_sha256.hexdigest()
    
    async def create_full_backup(self, 
                               backup_name: Optional[str] = None,
                               compress: bool = True,
                               include_cache: bool = False) -> Dict[str, Any]:
        """
        Create a full backup of all CodeMCP data.

        Args:
            backup_name: Custom backup name (optional)
            compress: Whether to compress the backup; when False the backup
                is written as a plain directory rather than a .tar.gz archive
            include_cache: Whether to include cache data

        Returns:
            Backup information dictionary (path, filename, size, hash,
            timestamp, metadata). For uncompressed directory backups
            "backup_hash" is None and "backup_size" is the sum of file sizes.

        Raises:
            BackupError: If any stage of the backup fails.
        """
        try:
            backup_timestamp = datetime.now(timezone.utc)

            if backup_name:
                backup_filename = f"{backup_name}.tar.gz" if compress else backup_name
            else:
                backup_filename = self._generate_backup_filename("full", backup_timestamp)
                if not compress:
                    # Auto-generated names always end in ".tar.gz"; strip the
                    # archive extension when the backup is a plain directory
                    # (otherwise restore would mistake the directory for an
                    # archive based on its suffix).
                    backup_filename = backup_filename[:-len(".tar.gz")]

            backup_path = self.backup_dir / backup_filename

            log_info(f"Creating full backup: {backup_filename}")

            # Stage everything in a temporary directory first so a partial
            # failure never leaves a half-written backup in backup_dir.
            with tempfile.TemporaryDirectory() as temp_dir:
                backup_staging = Path(temp_dir) / "backup"
                backup_staging.mkdir()

                # Metadata describing this backup; stored alongside the data
                # and read back by restore/list/verify.
                metadata = {
                    "backup_type": "full",
                    "timestamp": backup_timestamp.isoformat(),
                    "codemcp_version": "1.0.0",
                    "include_cache": include_cache,
                    "database_url": self.db_manager._sanitize_url(self.db_manager.database_url) if self.db_manager else None
                }

                # 1. Raw SQLite database file (fast restore path).
                database_backup_path = await self._backup_database(backup_staging)
                if database_backup_path:
                    metadata["database_backup"] = database_backup_path.name

                # 2. Call trees in portable JSON form.
                call_trees_backup_path = await self._backup_call_trees(backup_staging)
                if call_trees_backup_path:
                    metadata["call_trees_backup"] = call_trees_backup_path.name

                # 3. Analysis results.
                analysis_backup_path = await self._backup_analysis_results(backup_staging)
                if analysis_backup_path:
                    metadata["analysis_backup"] = analysis_backup_path.name

                # 4. File change tracking records.
                file_changes_backup_path = await self._backup_file_changes(backup_staging)
                if file_changes_backup_path:
                    metadata["file_changes_backup"] = file_changes_backup_path.name

                # 5. Cache entries (optional; cache is normally ephemeral).
                if include_cache:
                    cache_backup_path = await self._backup_cache_data(backup_staging)
                    if cache_backup_path:
                        metadata["cache_backup"] = cache_backup_path.name

                # Write metadata
                metadata_path = backup_staging / "backup_metadata.json"
                with open(metadata_path, 'w') as f:
                    json.dump(metadata, f, indent=2, default=str)

                # Create the backup archive (or directory copy).
                if compress:
                    # Add each staged file under its plain name (no "./"
                    # prefix) so consumers can find "backup_metadata.json"
                    # by exact member name.
                    with tarfile.open(backup_path, "w:gz") as tar:
                        for item in sorted(backup_staging.iterdir()):
                            tar.add(item, arcname=item.name)
                    backup_hash = self._calculate_file_hash(backup_path)
                    backup_size = backup_path.stat().st_size
                else:
                    shutil.copytree(backup_staging, backup_path)
                    # A directory has no single file to hash; report the sum
                    # of contained file sizes instead (hashing the directory
                    # path would raise IsADirectoryError).
                    backup_hash = None
                    backup_size = sum(
                        p.stat().st_size for p in backup_path.rglob("*") if p.is_file()
                    )

                # Update statistics
                self.stats["backups_created"] += 1
                self.stats["last_backup"] = backup_timestamp.isoformat()

                backup_info = {
                    "backup_path": str(backup_path),
                    "backup_filename": backup_filename,
                    "backup_size": backup_size,
                    "backup_hash": backup_hash,
                    "timestamp": backup_timestamp.isoformat(),
                    "metadata": metadata
                }

                log_info(f"Full backup created successfully: {backup_filename} ({backup_info['backup_size']} bytes)")
                return backup_info

        except Exception as e:
            self.stats["backup_failures"] += 1
            error_response = handle_error(e, {
                "operation": "create_full_backup",
                "backup_name": backup_name
            })
            raise BackupError(f"Failed to create full backup: {error_response['message']}")
    
    async def _backup_database(self, backup_dir: Path) -> Optional[Path]:
        """Copy the raw SQLite database file into *backup_dir*.

        Returns the path of the copy ("database.sqlite"), or None when there
        is no SQLite database to back up or the copy fails.
        """
        if not self.db_manager or not self.db_manager.is_sqlite:
            log_debug("Skipping database backup (not SQLite or no database manager)")
            return None

        try:
            # The on-disk path is embedded in the SQLAlchemy-style URL.
            db_url = self.db_manager.database_url
            prefix = "sqlite+aiosqlite:///"
            if prefix in db_url:
                db_path = Path(db_url.replace(prefix, ""))
                if db_path.exists():
                    backup_db_path = backup_dir / "database.sqlite"
                    shutil.copy2(db_path, backup_db_path)
                    log_debug(f"Database backed up to: {backup_db_path}")
                    return backup_db_path

            return None

        except Exception as e:
            log_warning(f"Failed to backup database: {e}")
            return None
    
    async def _backup_call_trees(self, backup_dir: Path) -> Optional[Path]:
        """Backup call trees in JSON format."""
        if not self.db_manager or not self.serializer:
            return None
        
        try:
            # Get all call trees
            call_tree_models = await self.db_manager.get_by_filter(CallTreeModel)
            
            if not call_tree_models:
                log_debug("No call trees to backup")
                return None
            
            call_trees_data = []
            
            for tree_model in call_tree_models:
                # Deserialize to CallTree object
                call_tree = await self.serializer.deserialize_call_tree(tree_model)
                
                # Convert to JSON-serializable format
                tree_data = {
                    "tree_id": tree_model.id,
                    "tree_metadata": tree_model.to_dict(),
                    "tree_json": self.serializer.export_to_json(call_tree, include_metadata=True)
                }
                call_trees_data.append(tree_data)
            
            # Save call trees data
            backup_path = backup_dir / "call_trees.json"
            with open(backup_path, 'w') as f:
                json.dump(call_trees_data, f, indent=2, default=str)
            
            log_debug(f"Backed up {len(call_trees_data)} call trees")
            return backup_path
            
        except Exception as e:
            log_warning(f"Failed to backup call trees: {e}")
            return None
    
    async def _backup_analysis_results(self, backup_dir: Path) -> Optional[Path]:
        """Backup analysis results."""
        if not self.db_manager:
            return None
        
        try:
            # Get all analysis results
            analysis_models = await self.db_manager.get_by_filter(AnalysisResultModel)
            
            if not analysis_models:
                log_debug("No analysis results to backup")
                return None
            
            analysis_data = [model.to_dict() for model in analysis_models]
            
            # Save analysis results
            backup_path = backup_dir / "analysis_results.json"
            with open(backup_path, 'w') as f:
                json.dump(analysis_data, f, indent=2, default=str)
            
            log_debug(f"Backed up {len(analysis_data)} analysis results")
            return backup_path
            
        except Exception as e:
            log_warning(f"Failed to backup analysis results: {e}")
            return None
    
    async def _backup_file_changes(self, backup_dir: Path) -> Optional[Path]:
        """Backup file change tracking data."""
        if not self.db_manager:
            return None
        
        try:
            # Get all file changes
            file_change_models = await self.db_manager.get_by_filter(FileChangeModel)
            
            if not file_change_models:
                log_debug("No file changes to backup")
                return None
            
            file_changes_data = [model.to_dict() for model in file_change_models]
            
            # Save file changes
            backup_path = backup_dir / "file_changes.json"
            with open(backup_path, 'w') as f:
                json.dump(file_changes_data, f, indent=2, default=str)
            
            log_debug(f"Backed up {len(file_changes_data)} file change records")
            return backup_path
            
        except Exception as e:
            log_warning(f"Failed to backup file changes: {e}")
            return None
    
    async def _backup_cache_data(self, backup_dir: Path) -> Optional[Path]:
        """Backup cache data."""
        # Cache data is typically ephemeral, but we can backup it for completeness
        # This would backup the database cache entries
        if not self.db_manager:
            return None
        
        try:
            from .models import CacheEntryModel
            
            # Get all cache entries
            cache_models = await self.db_manager.get_by_filter(CacheEntryModel)
            
            if not cache_models:
                log_debug("No cache data to backup")
                return None
            
            cache_data = [model.to_dict() for model in cache_models]
            
            # Save cache data
            backup_path = backup_dir / "cache_data.json"
            with open(backup_path, 'w') as f:
                json.dump(cache_data, f, indent=2, default=str)
            
            log_debug(f"Backed up {len(cache_data)} cache entries")
            return backup_path
            
        except Exception as e:
            log_warning(f"Failed to backup cache data: {e}")
            return None
    
    async def restore_from_backup(self, 
                                backup_path: Union[str, Path],
                                restore_options: Optional[Dict[str, bool]] = None) -> Dict[str, Any]:
        """
        Restore data from a backup file.

        Args:
            backup_path: Path to backup file (.tar.gz archive or a plain
                backup directory)
            restore_options: Options for what to restore (defaults to
                everything except cache)

        Returns:
            Restore information dictionary with the backup metadata, restore
            timestamp, and the list of components actually restored.

        Raises:
            BackupError: If the backup is missing/invalid or restore fails.
        """
        try:
            backup_path = Path(backup_path)
            restore_timestamp = datetime.now(timezone.utc)

            if not backup_path.exists():
                raise BackupError(f"Backup file not found: {backup_path}")

            # Default restore options: everything except the ephemeral cache.
            if restore_options is None:
                restore_options = {
                    "restore_database": True,
                    "restore_call_trees": True,
                    "restore_analysis_results": True,
                    "restore_file_changes": True,
                    "restore_cache": False  # Usually don't restore cache
                }

            log_info(f"Restoring from backup: {backup_path}")

            # Extract into a temporary directory; nothing in the live data is
            # touched until the archive has been unpacked and validated.
            with tempfile.TemporaryDirectory() as temp_dir:
                extract_dir = Path(temp_dir) / "extract"
                extract_dir.mkdir()

                if backup_path.suffix == ".gz":
                    with tarfile.open(backup_path, "r:gz") as tar:
                        # Backup archives may come from untrusted sources; the
                        # "data" extraction filter blocks path traversal and
                        # other unsafe members (available since the
                        # 3.8.17/3.9.17/3.10.12/3.11.4 security releases).
                        try:
                            tar.extractall(extract_dir, filter="data")
                        except TypeError:
                            # Older interpreter without the filter= parameter.
                            tar.extractall(extract_dir)
                else:
                    # Uncompressed backups are plain directories.
                    shutil.copytree(backup_path, extract_dir / "backup")
                    extract_dir = extract_dir / "backup"

                # Read backup metadata; its absence means this is not a
                # CodeMCP backup.
                metadata_path = extract_dir / "backup_metadata.json"
                if not metadata_path.exists():
                    raise BackupError("Backup metadata not found - invalid backup file")

                with open(metadata_path, 'r') as f:
                    metadata = json.load(f)

                restore_results = {
                    "backup_metadata": metadata,
                    "restore_timestamp": restore_timestamp.isoformat(),
                    "restored_components": []
                }

                # Restore each requested component; the helpers return True
                # only when something was actually restored.
                if restore_options.get("restore_database", True):
                    if await self._restore_database(extract_dir, metadata):
                        restore_results["restored_components"].append("database")

                if restore_options.get("restore_call_trees", True):
                    if await self._restore_call_trees(extract_dir, metadata):
                        restore_results["restored_components"].append("call_trees")

                if restore_options.get("restore_analysis_results", True):
                    if await self._restore_analysis_results(extract_dir, metadata):
                        restore_results["restored_components"].append("analysis_results")

                if restore_options.get("restore_file_changes", True):
                    if await self._restore_file_changes(extract_dir, metadata):
                        restore_results["restored_components"].append("file_changes")

                if restore_options.get("restore_cache", False):
                    if await self._restore_cache_data(extract_dir, metadata):
                        restore_results["restored_components"].append("cache")

                # Update statistics
                self.stats["backups_restored"] += 1
                self.stats["last_restore"] = restore_timestamp.isoformat()

                log_info(f"Restore completed successfully. Restored: {', '.join(restore_results['restored_components'])}")
                return restore_results

        except Exception as e:
            self.stats["restore_failures"] += 1
            error_response = handle_error(e, {
                "operation": "restore_from_backup",
                "backup_path": str(backup_path)
            })
            raise BackupError(f"Failed to restore from backup: {error_response['message']}")
    
    async def _restore_database(self, extract_dir: Path, metadata: Dict[str, Any]) -> bool:
        """Restore the SQLite database file from an extracted backup.

        Replaces the live database file with the copy named in the backup
        metadata. Before overwriting, the current database file is preserved
        next to itself; the database manager is closed first and
        reinitialized afterwards so no connection holds the old file open.

        Args:
            extract_dir: Directory containing the extracted backup contents.
            metadata: Parsed backup_metadata.json from the backup.

        Returns:
            True when the database file was replaced and the manager
            reinitialized; False when skipped (non-SQLite backend, no
            manager, missing backup file) or on failure.
        """
        if not self.db_manager or not self.db_manager.is_sqlite:
            log_debug("Skipping database restore (not SQLite or no database manager)")
            return False
        
        try:
            database_backup_file = metadata.get("database_backup")
            if not database_backup_file:
                log_debug("No database backup found in metadata")
                return False
            
            backup_db_path = extract_dir / database_backup_file
            if not backup_db_path.exists():
                log_warning(f"Database backup file not found: {backup_db_path}")
                return False
            
            # Extract current database path from the SQLAlchemy-style URL.
            db_url = self.db_manager.database_url
            if "sqlite+aiosqlite:///" in db_url:
                current_db_path_str = db_url.replace("sqlite+aiosqlite:///", "")
                current_db_path = Path(current_db_path_str)
                
                # Close current database connections so the file can be
                # swapped out safely underneath the manager.
                await self.db_manager.close()
                
                # Safety net: keep a copy of the current database.
                # NOTE(review): with_suffix(".db.backup") REPLACES the file's
                # existing extension (e.g. "store.sqlite" -> "store.db.backup")
                # rather than appending — confirm this naming is intended.
                if current_db_path.exists():
                    backup_current = current_db_path.with_suffix(".db.backup")
                    shutil.copy2(current_db_path, backup_current)
                    log_debug(f"Backed up current database to: {backup_current}")
                
                # Copy restored database over the live file.
                shutil.copy2(backup_db_path, current_db_path)
                
                # Reinitialize database manager against the restored file.
                await self.db_manager.initialize()
                
                log_debug("Database restored successfully")
                return True
            
            # URL scheme not recognized as a file-backed SQLite database.
            return False
            
        except Exception as e:
            log_warning(f"Failed to restore database: {e}")
            return False
    
    async def _restore_call_trees(self, extract_dir: Path, metadata: Dict[str, Any]) -> bool:
        """Restore call trees from backup."""
        if not self.db_manager or not self.serializer:
            return False
        
        try:
            call_trees_backup_file = metadata.get("call_trees_backup")
            if not call_trees_backup_file:
                log_debug("No call trees backup found in metadata")
                return False
            
            backup_file_path = extract_dir / call_trees_backup_file
            if not backup_file_path.exists():
                log_warning(f"Call trees backup file not found: {backup_file_path}")
                return False
            
            with open(backup_file_path, 'r') as f:
                call_trees_data = json.load(f)
            
            restored_count = 0
            for tree_data in call_trees_data:
                try:
                    # Import call tree from JSON
                    call_tree = self.serializer.import_from_json(tree_data["tree_json"])
                    
                    # Serialize to database
                    tree_metadata = tree_data["tree_metadata"]
                    await self.serializer.serialize_call_tree(
                        call_tree, 
                        tree_metadata.get("analysis_type", "unknown")
                    )
                    
                    restored_count += 1
                    
                except Exception as e:
                    log_warning(f"Failed to restore call tree {tree_data.get('tree_id', 'unknown')}: {e}")
            
            log_debug(f"Restored {restored_count} call trees")
            return restored_count > 0
            
        except Exception as e:
            log_warning(f"Failed to restore call trees: {e}")
            return False
    
    async def _restore_analysis_results(self, extract_dir: Path, metadata: Dict[str, Any]) -> bool:
        """Restore analysis results from backup."""
        if not self.db_manager:
            return False
        
        try:
            analysis_backup_file = metadata.get("analysis_backup")
            if not analysis_backup_file:
                log_debug("No analysis results backup found in metadata")
                return False
            
            backup_file_path = extract_dir / analysis_backup_file
            if not backup_file_path.exists():
                log_warning(f"Analysis results backup file not found: {backup_file_path}")
                return False
            
            with open(backup_file_path, 'r') as f:
                analysis_data = json.load(f)
            
            restored_count = 0
            for result_data in analysis_data:
                try:
                    # Create analysis result model
                    analysis_model = AnalysisResultModel(**{
                        k: v for k, v in result_data.items() 
                        if k not in ['id', 'created_at']  # Skip ID and created_at
                    })
                    
                    await self.db_manager.create(analysis_model)
                    restored_count += 1
                    
                except Exception as e:
                    log_warning(f"Failed to restore analysis result: {e}")
            
            log_debug(f"Restored {restored_count} analysis results")
            return restored_count > 0
            
        except Exception as e:
            log_warning(f"Failed to restore analysis results: {e}")
            return False
    
    async def _restore_file_changes(self, extract_dir: Path, metadata: Dict[str, Any]) -> bool:
        """Restore file changes from backup."""
        if not self.db_manager:
            return False
        
        try:
            file_changes_backup_file = metadata.get("file_changes_backup")
            if not file_changes_backup_file:
                log_debug("No file changes backup found in metadata")
                return False
            
            backup_file_path = extract_dir / file_changes_backup_file
            if not backup_file_path.exists():
                log_warning(f"File changes backup file not found: {backup_file_path}")
                return False
            
            with open(backup_file_path, 'r') as f:
                file_changes_data = json.load(f)
            
            restored_count = 0
            for change_data in file_changes_data:
                try:
                    # Create file change model
                    file_change_model = FileChangeModel(**{
                        k: v for k, v in change_data.items() 
                        if k not in ['id', 'created_at', 'updated_at']
                    })
                    
                    await self.db_manager.create(file_change_model)
                    restored_count += 1
                    
                except Exception as e:
                    log_warning(f"Failed to restore file change record: {e}")
            
            log_debug(f"Restored {restored_count} file change records")
            return restored_count > 0
            
        except Exception as e:
            log_warning(f"Failed to restore file changes: {e}")
            return False
    
    async def _restore_cache_data(self, extract_dir: Path, metadata: Dict[str, Any]) -> bool:
        """Restore cache data from backup."""
        if not self.db_manager:
            return False
        
        try:
            from .models import CacheEntryModel
            
            cache_backup_file = metadata.get("cache_backup")
            if not cache_backup_file:
                log_debug("No cache data backup found in metadata")
                return False
            
            backup_file_path = extract_dir / cache_backup_file
            if not backup_file_path.exists():
                log_warning(f"Cache data backup file not found: {backup_file_path}")
                return False
            
            with open(backup_file_path, 'r') as f:
                cache_data = json.load(f)
            
            restored_count = 0
            for entry_data in cache_data:
                try:
                    # Create cache entry model
                    cache_model = CacheEntryModel(**{
                        k: v for k, v in entry_data.items() 
                        if k not in ['created_at', 'last_accessed']
                    })
                    
                    await self.db_manager.create(cache_model)
                    restored_count += 1
                    
                except Exception as e:
                    log_warning(f"Failed to restore cache entry: {e}")
            
            log_debug(f"Restored {restored_count} cache entries")
            return restored_count > 0
            
        except Exception as e:
            log_warning(f"Failed to restore cache data: {e}")
            return False
    
    async def list_backups(self) -> List[Dict[str, Any]]:
        """List all available backups."""
        try:
            backups = []
            
            for backup_file in self.backup_dir.glob("*.tar.gz"):
                try:
                    stat = backup_file.stat()
                    backup_info = {
                        "filename": backup_file.name,
                        "path": str(backup_file),
                        "size": stat.st_size,
                        "created": datetime.fromtimestamp(stat.st_ctime).isoformat(),
                        "modified": datetime.fromtimestamp(stat.st_mtime).isoformat(),
                        "hash": self._calculate_file_hash(backup_file)
                    }
                    
                    # Try to extract metadata if possible
                    try:
                        with tarfile.open(backup_file, "r:gz") as tar:
                            if "backup_metadata.json" in tar.getnames():
                                metadata_file = tar.extractfile("backup_metadata.json")
                                metadata = json.load(metadata_file)
                                backup_info["metadata"] = metadata
                    except Exception:
                        pass  # Ignore metadata extraction errors
                    
                    backups.append(backup_info)
                    
                except Exception as e:
                    log_warning(f"Failed to get info for backup {backup_file}: {e}")
            
            # Sort by creation time (newest first)
            backups.sort(key=lambda x: x["created"], reverse=True)
            
            return backups
            
        except Exception as e:
            error_response = handle_error(e, {"operation": "list_backups"})
            raise BackupError(f"Failed to list backups: {error_response['message']}")
    
    async def delete_backup(self, backup_filename: str) -> bool:
        """Remove a backup file from the backup directory.

        Returns True when the file was deleted, False when it did not exist.

        Raises:
            BackupError: If deletion fails for any other reason.
        """
        try:
            target = self.backup_dir / backup_filename

            if not target.exists():
                log_warning(f"Backup file not found: {backup_filename}")
                return False

            target.unlink()
            log_info(f"Deleted backup: {backup_filename}")
            return True

        except Exception as e:
            error_response = handle_error(e, {
                "operation": "delete_backup",
                "backup_filename": backup_filename
            })
            raise BackupError(f"Failed to delete backup: {error_response['message']}")
    
    async def verify_backup(self, backup_path: Union[str, Path]) -> Dict[str, Any]:
        """Verify backup integrity."""
        try:
            backup_path = Path(backup_path)
            
            if not backup_path.exists():
                return {"valid": False, "error": "Backup file not found"}
            
            verification_result = {
                "valid": True,
                "file_hash": self._calculate_file_hash(backup_path),
                "file_size": backup_path.stat().st_size,
                "can_extract": False,
                "has_metadata": False,
                "metadata": None,
                "errors": []
            }
            
            # Try to extract and verify contents
            try:
                with tarfile.open(backup_path, "r:gz") as tar:
                    verification_result["can_extract"] = True
                    
                    # Check for metadata
                    if "backup_metadata.json" in tar.getnames():
                        verification_result["has_metadata"] = True
                        metadata_file = tar.extractfile("backup_metadata.json")
                        metadata = json.load(metadata_file)
                        verification_result["metadata"] = metadata
                    
            except Exception as e:
                verification_result["valid"] = False
                verification_result["errors"].append(f"Extraction error: {str(e)}")
            
            return verification_result
            
        except Exception as e:
            error_response = handle_error(e, {
                "operation": "verify_backup",
                "backup_path": str(backup_path)
            })
            return {
                "valid": False,
                "error": error_response["message"]
            }
    
    async def get_backup_stats(self) -> Dict[str, Any]:
        """Get backup statistics."""
        try:
            backups = await self.list_backups()
            total_size = sum(backup["size"] for backup in backups)
            
            return {
                "backup_stats": self.stats.copy(),
                "backup_directory": str(self.backup_dir),
                "total_backups": len(backups),
                "total_size_bytes": total_size,
                "total_size_human": self._format_size(total_size),
                "newest_backup": backups[0]["filename"] if backups else None,
                "oldest_backup": backups[-1]["filename"] if backups else None
            }
            
        except Exception as e:
            error_response = handle_error(e, {"operation": "get_backup_stats"})
            return {
                "backup_stats": self.stats.copy(),
                "error": error_response["message"]
            }
    
    def _format_size(self, size_bytes: int) -> str:
        """Format size in human readable format."""
        for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
            if size_bytes < 1024.0:
                return f"{size_bytes:.1f} {unit}"
            size_bytes /= 1024.0
        return f"{size_bytes:.1f} PB"