"""
File change detection and cache invalidation for CodeMCP.

Monitors file system changes and automatically invalidates related
cache entries and analysis results to ensure data consistency.
"""

import asyncio
import hashlib
import os
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Any, List, Optional, Set, Callable, Tuple
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileModifiedEvent, FileCreatedEvent, FileDeletedEvent
import fnmatch

from .database import DatabaseManager
from .cache import CacheManager
from .models import FileChangeModel, AnalysisResultModel, CallTreeModel
from ..core.config import Config
from ..core.error_handler import CodeMCPError, log_info, log_warning, log_debug, handle_error


class FileWatcherError(CodeMCPError):
    """Raised for failures inside the file-watching subsystem.

    Carries the fixed error code ``FILE_WATCHER_ERROR``; when the failing
    operation is known (and truthy) it is recorded under ``details["operation"]``.
    """

    def __init__(self, message: str, operation: str = None, **kwargs):
        """Build the error; *operation* names the watcher action that failed."""
        super().__init__(message, "FILE_WATCHER_ERROR", **kwargs)
        if operation:
            self.details["operation"] = operation


class FileChangeHandler(FileSystemEventHandler):
    """Bridges watchdog's threaded callbacks into the asyncio world.

    watchdog calls the ``on_*`` hooks from its own observer thread, where no
    asyncio event loop is running.  Calling ``asyncio.create_task()`` there
    raises ``RuntimeError`` (no running loop), which silently dropped every
    change event.  We therefore capture the owning event loop when the
    handler is constructed (construction happens in async context via
    ``get_file_change_detector``) and hand coroutines to that loop
    thread-safely with ``asyncio.run_coroutine_threadsafe()``.
    """

    def __init__(self, file_watcher: 'FileChangeDetector'):
        self.file_watcher = file_watcher
        # Loop that owns the detector; None when constructed outside async
        # context (e.g. in synchronous tests) — we then fall back below.
        try:
            self._loop = asyncio.get_running_loop()
        except RuntimeError:
            self._loop = None

    def _dispatch(self, file_path: str, change_type: str):
        """Schedule the detector's async change handler from any thread."""
        coro = self.file_watcher._handle_file_change(file_path, change_type)
        if self._loop is not None and self._loop.is_running():
            # Thread-safe hand-off to the captured loop (the common case:
            # we are on the watchdog observer thread here).
            asyncio.run_coroutine_threadsafe(coro, self._loop)
        else:
            try:
                # Last resort: maybe this thread has a running loop.
                asyncio.create_task(coro)
            except RuntimeError:
                # No loop anywhere; close the coroutine to avoid a
                # "never awaited" warning and drop the event.
                coro.close()

    def on_modified(self, event):
        """Handle file modification events."""
        if not event.is_directory:
            self._dispatch(event.src_path, "modified")

    def on_created(self, event):
        """Handle file creation events."""
        if not event.is_directory:
            self._dispatch(event.src_path, "created")

    def on_deleted(self, event):
        """Handle file deletion events."""
        if not event.is_directory:
            self._dispatch(event.src_path, "deleted")

    def on_moved(self, event):
        """Handle file move/rename events."""
        if not event.is_directory:
            # A move is a deletion at the old path plus a change at the new one.
            self._dispatch(event.src_path, "deleted")
            self._dispatch(event.dest_path, "moved")


class FileChangeDetector:
    """
    File change detection and cache invalidation system.
    
    Monitors specified directories for file changes and automatically
    invalidates related cache entries and analysis results.

    Lifecycle: construct, ``await initialize()`` (starts the batch-processor
    and periodic-scan background tasks), ``await start_watching(dirs)`` to
    begin receiving events, and ``await stop_watching()`` to shut down.
    Both ``db_manager`` and ``cache_manager`` are optional; the corresponding
    persistence / invalidation work is skipped when one is absent.
    """
    
    def __init__(self, 
                 config: Config = None,
                 db_manager: DatabaseManager = None,
                 cache_manager: CacheManager = None):
        self.config = config or Config()
        self.db_manager = db_manager
        self.cache_manager = cache_manager
        
        # File watching configuration
        self.watched_directories: Set[Path] = set()
        # fnmatch-style patterns for files that never trigger invalidation.
        # NOTE(review): patterns are tested against the *absolute* path and
        # the basename (see _should_ignore_file); a pattern like ".git/*"
        # will not match "/repo/.git/config" — probably needs "*/.git/*".
        # Confirm intended matching before relying on the directory patterns.
        self.ignore_patterns: List[str] = [
            "*.tmp", "*.log", "*.pyc", "__pycache__/*", 
            ".git/*", ".svn/*", "node_modules/*",
            "*.swp", "*.swo", "*~"
        ]
        
        # File system observer
        self.observer = Observer()
        self.event_handler = FileChangeHandler(self)
        self.is_watching = False
        
        # Change tracking: maps absolute path -> latest pending change record.
        # Keyed by path so rapid successive events on one file coalesce
        # (last event wins) until the next batch flush.
        self.pending_changes: Dict[str, Dict[str, Any]] = {}
        self.batch_processing_interval = 5.0  # seconds
        # NOTE(review): last_batch_time is written here but never read in
        # this module — looks vestigial; confirm before removing.
        self.last_batch_time = time.time()
        
        # Invalidation callbacks, each invoked with the list of changed paths
        # after a batch has been invalidated.
        self.invalidation_callbacks: List[Callable[[List[str]], None]] = []
        
        # Statistics (exposed via get_file_change_stats)
        self.stats = {
            "files_watched": 0,
            "changes_detected": 0,
            "cache_invalidations": 0,
            "analysis_invalidations": 0,
            "errors_encountered": 0,
            "last_change": None
        }
        
        # Background tasks (created in initialize, cancelled in stop_watching)
        self._batch_processor_task = None
        self._periodic_scan_task = None
        
        log_info("File change detector initialized")
    
    async def initialize(self):
        """Initialize the file change detector.

        Starts the two background tasks (batch processor, periodic scan).
        Must be awaited from inside a running event loop before
        start_watching().

        Raises:
            FileWatcherError: if task creation fails.
        """
        try:
            # Start background tasks
            self._batch_processor_task = asyncio.create_task(self._batch_processor())
            self._periodic_scan_task = asyncio.create_task(self._periodic_scan())
            
            log_info("File change detector initialized successfully")
            
        except Exception as e:
            error_response = handle_error(e, {"operation": "file_watcher_init"})
            raise FileWatcherError(f"Failed to initialize file change detector: {error_response['message']}")
    
    async def start_watching(self, directories: List[str] = None):
        """Start watching specified directories for changes.

        Args:
            directories: optional list of directory paths to add before
                starting; directories added earlier via add_watch_directory
                are watched as well.

        Raises:
            FileWatcherError: if the observer cannot be started.
        """
        try:
            if directories:
                for directory in directories:
                    await self.add_watch_directory(directory)
            
            if self.watched_directories:
                # NOTE(review): a watchdog Observer is a thread and cannot be
                # started again after stop(); start/stop cycling would need a
                # fresh Observer — confirm if that usage is expected.
                self.observer.start()
                self.is_watching = True
                log_info(f"Started watching {len(self.watched_directories)} directories")
            else:
                log_warning("No directories to watch")
                
        except Exception as e:
            error_response = handle_error(e, {"operation": "start_watching"})
            raise FileWatcherError(f"Failed to start watching: {error_response['message']}")
    
    async def stop_watching(self):
        """Stop watching for file changes.

        Stops the observer thread and requests cancellation of the background
        tasks.  Cancellation is requested but not awaited, so the tasks may
        still be winding down when this returns.

        Raises:
            FileWatcherError: if shutdown fails.
        """
        try:
            if self.is_watching:
                self.observer.stop()
                # join() blocks the event loop briefly until the observer
                # thread exits.
                self.observer.join()
                self.is_watching = False
                log_info("Stopped watching for file changes")
            
            # Cancel background tasks
            if self._batch_processor_task:
                self._batch_processor_task.cancel()
            if self._periodic_scan_task:
                self._periodic_scan_task.cancel()
                
        except Exception as e:
            error_response = handle_error(e, {"operation": "stop_watching"})
            raise FileWatcherError(f"Failed to stop watching: {error_response['message']}")
    
    async def add_watch_directory(self, directory: str):
        """Add a directory to watch for changes.

        Silently returns (with a warning/debug log) when the path does not
        exist, is not a directory, or is already watched.  On success the
        directory is scheduled recursively with the observer and scanned once
        to seed the file-change records.

        Raises:
            FileWatcherError: on unexpected failure.
        """
        try:
            directory_path = Path(directory).resolve()
            
            if not directory_path.exists():
                log_warning(f"Directory does not exist: {directory_path}")
                return
            
            if not directory_path.is_dir():
                log_warning(f"Path is not a directory: {directory_path}")
                return
            
            if directory_path in self.watched_directories:
                log_debug(f"Directory already being watched: {directory_path}")
                return
            
            # Add to observer
            self.observer.schedule(self.event_handler, str(directory_path), recursive=True)
            self.watched_directories.add(directory_path)
            
            # Scan existing files to build initial state
            await self._scan_directory(directory_path)
            
            log_info(f"Added watch directory: {directory_path}")
            
        except Exception as e:
            error_response = handle_error(e, {
                "operation": "add_watch_directory",
                "directory": directory
            })
            raise FileWatcherError(f"Failed to add watch directory: {error_response['message']}")
    
    async def remove_watch_directory(self, directory: str):
        """Remove a directory from watching.

        Only removes the path from our bookkeeping set; the observer keeps
        its schedule (see note below), so events for this directory may still
        arrive until the observer is restarted.
        """
        try:
            directory_path = Path(directory).resolve()
            
            if directory_path in self.watched_directories:
                # Note: watchdog doesn't provide a direct way to remove specific watches
                # We'd need to restart the observer with updated watches
                self.watched_directories.discard(directory_path)
                log_info(f"Removed watch directory: {directory_path}")
            
        except Exception as e:
            error_response = handle_error(e, {
                "operation": "remove_watch_directory", 
                "directory": directory
            })
            raise FileWatcherError(f"Failed to remove watch directory: {error_response['message']}")
    
    def add_ignore_pattern(self, pattern: str) -> None:
        """Add a file pattern (fnmatch syntax) to ignore; duplicates are skipped."""
        if pattern not in self.ignore_patterns:
            self.ignore_patterns.append(pattern)
            log_debug(f"Added ignore pattern: {pattern}")
    
    def remove_ignore_pattern(self, pattern: str) -> None:
        """Remove a file pattern from ignore list (no-op if absent)."""
        if pattern in self.ignore_patterns:
            self.ignore_patterns.remove(pattern)
            log_debug(f"Removed ignore pattern: {pattern}")
    
    def _should_ignore_file(self, file_path: str) -> bool:
        """Check if a file should be ignored based on patterns.

        Each pattern is matched against both the full path string and the
        basename, so "*.pyc" catches any .pyc file anywhere.
        """
        file_path = str(file_path)
        
        for pattern in self.ignore_patterns:
            if fnmatch.fnmatch(file_path, pattern) or fnmatch.fnmatch(os.path.basename(file_path), pattern):
                return True
        
        return False
    
    async def _handle_file_change(self, file_path: str, change_type: str):
        """Handle a file change event.

        Records the change in pending_changes for later batch processing;
        never raises (errors are counted and logged instead), since it is
        driven by file-system events.
        """
        try:
            if self._should_ignore_file(file_path):
                return
            
            file_path = str(Path(file_path).resolve())
            
            # Add to pending changes for batch processing; keying by path
            # means the latest event for a file overwrites earlier ones.
            self.pending_changes[file_path] = {
                "file_path": file_path,
                "change_type": change_type,
                # NOTE(review): datetime.utcnow() is deprecated in 3.12+ —
                # consider datetime.now(timezone.utc).
                "timestamp": datetime.utcnow(),
                "detected_at": time.time()
            }
            
            self.stats["changes_detected"] += 1
            self.stats["last_change"] = datetime.utcnow().isoformat()
            
            log_debug(f"File change detected: {file_path} ({change_type})")
            
        except Exception as e:
            self.stats["errors_encountered"] += 1
            log_warning(f"Error handling file change {file_path}: {e}")
    
    async def _batch_processor(self):
        """Background task to process file changes in batches.

        Wakes every batch_processing_interval seconds, snapshots and clears
        pending_changes, then processes the snapshot so new events arriving
        mid-processing are not lost.  Exits only on cancellation.
        """
        while True:
            try:
                await asyncio.sleep(self.batch_processing_interval)
                
                if self.pending_changes:
                    # Snapshot-then-clear so concurrent event handlers can
                    # keep appending to pending_changes safely.
                    changes_to_process = dict(self.pending_changes)
                    self.pending_changes.clear()
                    
                    await self._process_file_changes(list(changes_to_process.values()))
                    
            except asyncio.CancelledError:
                break
            except Exception as e:
                self.stats["errors_encountered"] += 1
                log_warning(f"Error in batch processor: {e}")
                await asyncio.sleep(10)  # Wait before retrying
    
    async def _process_file_changes(self, changes: List[Dict[str, Any]]):
        """Process a batch of file changes.

        Updates the per-file DB records, then invalidates cache entries and
        analysis results touching any of the changed files.  Errors are
        logged, not raised.
        """
        try:
            if not changes:
                return
            
            log_debug(f"Processing {len(changes)} file changes")
            
            changed_files = []
            for change in changes:
                file_path = change["file_path"]
                change_type = change["change_type"]
                
                # Update file change tracking
                await self._update_file_change_record(file_path, change_type)
                
                # Collect changed files for invalidation
                changed_files.append(file_path)
            
            # Invalidate related cache entries and analysis results
            await self._invalidate_related_data(changed_files)
            
        except Exception as e:
            self.stats["errors_encountered"] += 1
            error_response = handle_error(e, {"operation": "process_file_changes"})
            log_warning(f"Error processing file changes: {error_response['message']}")
    
    async def _update_file_change_record(self, file_path: str, change_type: str):
        """Update or create a file change record in the database.

        No-op when no db_manager is configured.  For "deleted" events the
        record(s) for the path are removed; otherwise the record is created
        or refreshed with the current hash, size, and mtime.
        """
        if not self.db_manager:
            return
        
        try:
            file_path_obj = Path(file_path)
            
            if change_type == "deleted":
                # Remove file change record for deleted files
                existing_records = await self.db_manager.get_by_filter(
                    FileChangeModel, 
                    file_path=file_path
                )
                for record in existing_records:
                    await self.db_manager.delete(record)
                return
            
            # File may already be gone by the time the batch runs.
            if not file_path_obj.exists():
                return
            
            # Calculate file hash and metadata
            file_hash = self._calculate_file_hash(file_path_obj)
            file_stat = file_path_obj.stat()
            
            # Check if record exists
            existing_records = await self.db_manager.get_by_filter(
                FileChangeModel,
                file_path=file_path
            )
            
            if existing_records:
                # Update existing record
                record = existing_records[0]
                await self.db_manager.update(record,
                    file_hash=file_hash,
                    file_size=file_stat.st_size,
                    file_mtime=datetime.fromtimestamp(file_stat.st_mtime)
                )
            else:
                # Create new record
                record = FileChangeModel(
                    file_path=file_path,
                    file_hash=file_hash,
                    file_size=file_stat.st_size,
                    file_mtime=datetime.fromtimestamp(file_stat.st_mtime),
                    language=self._detect_language(file_path_obj)
                )
                await self.db_manager.create(record)
            
        except Exception as e:
            log_warning(f"Failed to update file change record for {file_path}: {e}")
    
    def _calculate_file_hash(self, file_path: Path) -> str:
        """Calculate SHA-256 hash of a file.

        Reads in 4 KiB chunks to bound memory use; returns "" (and logs a
        warning) if the file cannot be read.
        """
        try:
            hash_sha256 = hashlib.sha256()
            with open(file_path, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    hash_sha256.update(chunk)
            return hash_sha256.hexdigest()
        except Exception as e:
            log_warning(f"Failed to calculate hash for {file_path}: {e}")
            return ""
    
    def _detect_language(self, file_path: Path) -> Optional[str]:
        """Detect programming language from file extension.

        Returns None for unknown extensions.  Note that ".h" is mapped to
        "c" even though it may belong to a C++ project.
        """
        extension = file_path.suffix.lower()
        
        language_map = {
            ".py": "python",
            ".cpp": "cpp",
            ".cxx": "cpp", 
            ".cc": "cpp",
            ".c": "c",
            ".h": "c",
            ".hpp": "cpp",
            ".f90": "fortran",
            ".f95": "fortran",
            ".f03": "fortran",
            ".f08": "fortran",
            ".ts": "typescript",
            ".js": "javascript",
            ".tex": "latex",
            ".md": "markdown"
        }
        
        return language_map.get(extension)
    
    async def _invalidate_related_data(self, changed_files: List[str]):
        """Invalidate cache entries and analysis results related to changed files.

        For each file: invalidates cache entries tagged with the file, its
        parent directory, or its extension; deletes AnalysisResultModel rows
        targeting the file or its parent directory; finally notifies all
        registered invalidation callbacks with the changed-file list.
        Errors are logged, not raised.
        """
        try:
            invalidated_cache_count = 0
            invalidated_analysis_count = 0
            
            for file_path in changed_files:
                # Invalidate cache entries related to this file
                if self.cache_manager:
                    # Tag scheme must match how cache entries were tagged at
                    # write time: file path, containing dir, and extension.
                    cache_tags = [
                        f"file:{file_path}",
                        f"dir:{str(Path(file_path).parent)}",
                        f"ext:{Path(file_path).suffix}"
                    ]
                    
                    count = await self.cache_manager.invalidate_by_tags(cache_tags)
                    invalidated_cache_count += count
                
                # Invalidate analysis results related to this file
                if self.db_manager:
                    analysis_results = await self.db_manager.get_by_filter(
                        AnalysisResultModel,
                        target_path=file_path
                    )
                    
                    for result in analysis_results:
                        await self.db_manager.delete(result)
                        invalidated_analysis_count += 1
                    
                    # Also invalidate analysis results for the directory
                    dir_path = str(Path(file_path).parent)
                    dir_results = await self.db_manager.get_by_filter(
                        AnalysisResultModel,
                        target_path=dir_path
                    )
                    
                    for result in dir_results:
                        await self.db_manager.delete(result)
                        invalidated_analysis_count += 1
            
            self.stats["cache_invalidations"] += invalidated_cache_count
            self.stats["analysis_invalidations"] += invalidated_analysis_count
            
            if invalidated_cache_count > 0 or invalidated_analysis_count > 0:
                log_debug(f"Invalidated {invalidated_cache_count} cache entries and {invalidated_analysis_count} analysis results")
            
            # Notify callbacks (callbacks are synchronous; each failure is
            # isolated so one bad callback cannot block the others).
            for callback in self.invalidation_callbacks:
                try:
                    callback(changed_files)
                except Exception as e:
                    log_warning(f"Error in invalidation callback: {e}")
                    
        except Exception as e:
            error_response = handle_error(e, {"operation": "invalidate_related_data"})
            log_warning(f"Error invalidating related data: {error_response['message']}")
    
    async def _scan_directory(self, directory: Path):
        """Scan a directory to build initial file change records.

        Walks the tree recursively, refreshing the DB record for every
        non-ignored file with change_type "scanned" (which never triggers
        invalidation).  Errors are logged, not raised.
        """
        try:
            file_count = 0
            
            for file_path in directory.rglob("*"):
                if file_path.is_file() and not self._should_ignore_file(str(file_path)):
                    await self._update_file_change_record(str(file_path), "scanned")
                    file_count += 1
            
            # NOTE(review): this increments by the full file count on every
            # scan, so repeated periodic scans inflate "files_watched" —
            # confirm whether it should be a gauge rather than a counter.
            self.stats["files_watched"] += file_count
            log_debug(f"Scanned {file_count} files in {directory}")
            
        except Exception as e:
            log_warning(f"Error scanning directory {directory}: {e}")
    
    async def _periodic_scan(self):
        """Periodic scan to catch changes that may have been missed.

        Every 5 minutes re-scans all watched directories to refresh DB
        records (it does not trigger invalidation).  Exits only on
        cancellation.
        """
        while True:
            try:
                await asyncio.sleep(300)  # 5 minutes
                
                if not self.watched_directories:
                    continue
                
                # Quick scan of watched directories
                for directory in self.watched_directories:
                    await self._scan_directory(directory)
                
            except asyncio.CancelledError:
                break
            except Exception as e:
                log_warning(f"Error in periodic scan: {e}")
                await asyncio.sleep(60)  # Wait before retrying
    
    def add_invalidation_callback(self, callback: Callable[[List[str]], None]):
        """Add a callback to be called (with the changed-file list) after invalidation."""
        self.invalidation_callbacks.append(callback)
    
    def remove_invalidation_callback(self, callback: Callable[[List[str]], None]):
        """Remove an invalidation callback (no-op if not registered)."""
        if callback in self.invalidation_callbacks:
            self.invalidation_callbacks.remove(callback)
    
    async def force_invalidate_file(self, file_path: str):
        """Force invalidation of a specific file, bypassing event detection."""
        await self._invalidate_related_data([file_path])
    
    async def force_invalidate_directory(self, directory: str):
        """Force invalidation of all (non-ignored) files under a directory.

        Raises:
            FileWatcherError: if the directory walk or invalidation fails.
        """
        try:
            directory_path = Path(directory)
            changed_files = []
            
            for file_path in directory_path.rglob("*"):
                if file_path.is_file() and not self._should_ignore_file(str(file_path)):
                    changed_files.append(str(file_path))
            
            await self._invalidate_related_data(changed_files)
            
        except Exception as e:
            error_response = handle_error(e, {
                "operation": "force_invalidate_directory",
                "directory": directory
            })
            raise FileWatcherError(f"Failed to force invalidate directory: {error_response['message']}")
    
    async def get_file_change_stats(self) -> Dict[str, Any]:
        """Get file change detection statistics.

        Returns a snapshot of counters plus watch configuration; includes
        "tracked_files" from the DB when a db_manager is configured.  On
        failure returns the counters together with an "error" message
        instead of raising.
        """
        try:
            watched_count = len(self.watched_directories)
            
            # Get database stats if available
            db_stats = {}
            if self.db_manager:
                file_record_count = await self.db_manager.count(FileChangeModel)
                db_stats = {
                    "tracked_files": file_record_count
                }
            
            return {
                "file_watcher_stats": self.stats.copy(),
                "watched_directories": [str(d) for d in self.watched_directories],
                "watched_directory_count": watched_count,
                "ignore_patterns": self.ignore_patterns,
                "is_watching": self.is_watching,
                "pending_changes": len(self.pending_changes),
                **db_stats
            }
            
        except Exception as e:
            error_response = handle_error(e, {"operation": "get_file_change_stats"})
            return {
                "file_watcher_stats": self.stats.copy(),
                "error": error_response["message"]
            }

# Global file change detector instance: created lazily by
# get_file_change_detector() and reset to None by close_file_change_detector().
_global_file_detector: Optional["FileChangeDetector"] = None


async def get_file_change_detector(config: Config = None,
                                 db_manager: DatabaseManager = None,
                                 cache_manager: CacheManager = None) -> FileChangeDetector:
    """Get the global file change detector instance, creating it on first call.

    The config/manager arguments are only used when the singleton does not
    exist yet; later calls return the existing instance unchanged.

    Raises:
        FileWatcherError: if first-time initialization fails (in which case
            no singleton is published, so the next call retries cleanly).
    """
    global _global_file_detector
    
    if _global_file_detector is None:
        detector = FileChangeDetector(config, db_manager, cache_manager)
        # Publish the singleton only after initialize() succeeds.  The old
        # code assigned the global first, so a failed initialization left a
        # broken half-initialized instance that every later caller received.
        await detector.initialize()
        _global_file_detector = detector
    
    return _global_file_detector


async def close_file_change_detector():
    """Shut down and discard the global file change detector, if one exists."""
    global _global_file_detector
    
    detector = _global_file_detector
    if detector is not None:
        await detector.stop_watching()
        _global_file_detector = None