"""
LogRun-specific database adapter for the unified log database manager.

This adapter provides the same interface as the original LogStorage
but uses the new unified LogDatabaseManager internally.
"""

import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional, Any

from log_database_manager import LogDatabaseManager
from .models import LogEntry, LogRunConfig

logger = logging.getLogger(__name__)


class LogRunDatabaseAdapter:
    """Adapter to provide LogStorage-compatible interface using LogDatabaseManager.

    Bridges the legacy ``LogEntry`` model and the unified ``runs`` table:
    command executions are stored with ``run_type="command"``, the command
    string in ``name``, the original log-file path inside the ``config``
    JSON blob, and the exit code inside ``results``.
    """

    # Status values understood by the unified database; anything else is
    # treated as "no status filter".
    _KNOWN_STATUSES = frozenset({"success", "failed", "running"})

    def __init__(self, storage_file: Optional[str] = None):
        """Initialize adapter with the unified database manager.

        Args:
            storage_file: Optional legacy storage-file path. When given, the
                database is created alongside it (same stem, ``.db`` suffix)
                with a sibling ``logs`` directory; when ``None``, the XDG
                cache location ``~/.cache/logrun/`` is used.
        """
        if storage_file is None:
            # Use XDG cache directory standard: ~/.cache/logrun/
            cache_dir = Path.home() / ".cache" / "logrun"
            db_file = str(cache_dir / "logrun_history.db")
            logs_dir = str(cache_dir / "logs")
        else:
            # Derive the DB path from the legacy (JSON) storage-file path.
            storage_path = Path(storage_file)
            db_file = str(storage_path.with_suffix('.db'))
            logs_dir = str(storage_path.parent / "logs")

        self.db_manager = LogDatabaseManager(
            db_path=db_file,
            logs_dir=logs_dir
        )
        self.config = LogRunConfig()
        # Plain string: the message has no interpolation (was a no-op f-string).
        logger.info("LogRun Database adapter initialized")

    def add_entry(self, command: str, log_file: str, tags: Optional[List[str]] = None) -> LogEntry:
        """Add a new log entry for a just-started command.

        Args:
            command: The command line being executed.
            log_file: Path to the file the command's output is written to.
            tags: Optional list of user-supplied tags.

        Returns:
            A ``LogEntry`` in the legacy format with status implicitly
            "running" (``end_time``/``exit_code``/``duration`` unset).
        """
        start_time = datetime.now()

        # Snapshot the log content if the file already exists; failure to
        # read is non-fatal (best-effort capture).
        log_content = None
        log_path = Path(log_file)
        if log_path.exists():
            try:
                log_content = log_path.read_text(encoding='utf-8')
            except Exception as e:
                logger.warning(f"Could not read log file {log_file}: {e}")

        # Persist in the unified database; the original log_file path is
        # kept in config so _convert_to_log_entry can recover it.
        run_id = self.db_manager.add_run(
            run_type="command",
            name=command,
            config={"command": command, "log_file": log_file},
            log_content=log_content,
            tags=tags or [],
            start_time=start_time,
            status="running"
        )

        # Return a legacy-shaped entry for callers that still use LogStorage.
        return LogEntry(
            id=run_id,
            command=command,
            log_file=log_file,
            start_time=start_time,
            end_time=None,
            exit_code=None,
            duration=None,
            tags=tags or []
        )

    def update_entry(self, entry_id: int, **kwargs) -> Optional[LogEntry]:
        """Update an existing log entry.

        Recognized keyword arguments: ``end_time`` and ``exit_code``.
        Setting ``exit_code`` also derives the status ("success" for 0,
        "failed" otherwise) and stores the code in the run's results.

        Returns:
            The updated ``LogEntry``, or ``None`` if the entry does not exist.
        """
        db_entry = self.db_manager.get_run(entry_id)
        if not db_entry:
            return None

        update_params: Dict[str, Any] = {}

        if 'end_time' in kwargs:
            update_params['end_time'] = kwargs['end_time']
        if 'exit_code' in kwargs:
            # results may be None for a freshly created run (see
            # _convert_to_log_entry, which guards the same way) — start
            # from an empty dict in that case instead of crashing.
            results = dict(db_entry.results or {})
            results['exit_code'] = kwargs['exit_code']
            update_params['results'] = results

            # Derive status from the exit code.
            update_params['status'] = 'success' if kwargs['exit_code'] == 0 else 'failed'

        updated_entry = self.db_manager.update_run(entry_id, **update_params)
        if not updated_entry:
            return None

        return self._convert_to_log_entry(updated_entry)

    def get_entries(self, limit: Optional[int] = None,
                   command_filter: Optional[str] = None,
                   status_filter: Optional[str] = None,
                   tag_filter: Optional[str] = None) -> List[LogEntry]:
        """Get log entries with optional filtering.

        Args:
            limit: Maximum number of entries to return.
            command_filter: Restrict to entries whose name matches.
            status_filter: One of "success", "failed", "running"; any other
                value (including ``None``) applies no status filter.
            tag_filter: Restrict to entries carrying this tag.

        Returns:
            Matching entries converted to the legacy ``LogEntry`` format.
        """
        # Pass known statuses through unchanged; unknown values mean "no filter".
        db_status = status_filter if status_filter in self._KNOWN_STATUSES else None

        db_entries = self.db_manager.get_runs(
            limit=limit,
            run_type="command",
            name=command_filter,
            status=db_status,
            tag=tag_filter
        )

        return [self._convert_to_log_entry(entry) for entry in db_entries]

    def get_entry(self, entry_id: int) -> Optional[LogEntry]:
        """Get a specific log entry by ID, or ``None`` if it does not exist."""
        db_entry = self.db_manager.get_run(entry_id)
        if not db_entry:
            return None

        return self._convert_to_log_entry(db_entry)

    def cleanup_old_entries(self, days: Optional[int] = None) -> int:
        """Remove old log entries and their files.

        Args:
            days: Age threshold in days; defaults to the configured
                ``cleanup_days``.

        Returns:
            The number of entries removed (previously discarded; returning
            it is backward-compatible for callers that ignore the result).
        """
        if days is None:
            days = self.config.cleanup_days

        count = self.db_manager.cleanup_old_runs(days)
        logger.info(f"Cleaned up {count} old entries")
        return count

    def get_statistics(self) -> Dict[str, Any]:
        """Get statistics about command log entries.

        Returns:
            Dict with total/success/failed/running counts plus the top-10
            most frequent commands, computed over up to the 1000 most
            recent command entries.
        """
        command_entries = self.db_manager.get_runs(
            run_type="command",
            limit=1000  # Get recent entries for stats
        )

        total_entries = len(command_entries)
        # Tally statuses in a single pass instead of three list scans.
        status_counts: Dict[str, int] = {"success": 0, "failed": 0, "running": 0}
        commands: Dict[str, int] = {}
        for entry in command_entries:
            if entry.status in status_counts:
                status_counts[entry.status] += 1
            cmd = entry.command_name
            commands[cmd] = commands.get(cmd, 0) + 1

        return {
            "total_entries": total_entries,
            "successful": status_counts["success"],
            "failed": status_counts["failed"],
            "running": status_counts["running"],
            "command_frequency": dict(sorted(commands.items(), key=lambda x: x[1], reverse=True)[:10])
        }

    def delete_entry(self, entry_id: int) -> bool:
        """Delete a specific log entry and its associated log file.

        Returns:
            ``True`` if a database row was deleted, ``False`` if the entry
            did not exist. The log file is removed best-effort: failure to
            delete it does not abort the database deletion.
        """
        entry = self.get_entry(entry_id)
        if not entry:
            return False

        try:
            # Best-effort removal of the on-disk log file.
            log_path = Path(entry.log_file)
            if log_path.exists():
                log_path.unlink()
                logger.info(f"Deleted log file: {entry.log_file}")
        except Exception as e:
            logger.warning(f"Could not delete log file {entry.log_file}: {e}")

        # NOTE(review): reaches into the manager's private connection helper;
        # consider adding a public delete_run() API to LogDatabaseManager.
        with self.db_manager._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("DELETE FROM runs WHERE id = ?", (entry_id,))
            conn.commit()

            if cursor.rowcount > 0:
                logger.info(f"Deleted log entry #{entry_id}")
                return True
            return False

    def resequence_ids(self) -> None:
        """Resequence all command-entry IDs to be consecutive starting from 1.

        NOTE(review): the runs table is shared with other run_types; if a
        non-command row already occupies one of the new low IDs, the
        reinsert could violate the primary key — confirm against the schema.
        """
        with self.db_manager._get_connection() as conn:
            cursor = conn.cursor()
            # Fetch all command entries in creation order so new IDs
            # preserve chronology.
            cursor.execute("""
                SELECT id, run_type, name, category, config, results, status, 
                       log_path, start_time, end_time, duration, tags, metadata, created_at
                FROM runs 
                WHERE run_type = 'command'
                ORDER BY created_at, id
            """)

            entries = cursor.fetchall()
            if not entries:
                return

            # Clear the command rows and reinsert with sequential IDs.
            cursor.execute("DELETE FROM runs WHERE run_type = 'command'")

            for new_id, entry in enumerate(entries, 1):
                cursor.execute("""
                    INSERT INTO runs (id, run_type, name, category, config, results, status, 
                                    log_path, start_time, end_time, duration, tags, metadata, created_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (new_id, *entry[1:14]))

            conn.commit()
            logger.info(f"Resequenced {len(entries)} entries")

    def _convert_to_log_entry(self, db_entry) -> LogEntry:
        """Convert a unified database entry to the legacy LogEntry format.

        The original log-file path is recovered from the run's config blob
        and the exit code from its results; both may be absent.
        """
        log_file = db_entry.config.get('log_file', '') if db_entry.config else ''

        exit_code = None
        if db_entry.results and 'exit_code' in db_entry.results:
            exit_code = db_entry.results['exit_code']

        return LogEntry(
            id=db_entry.id,
            command=db_entry.name,
            log_file=log_file,
            start_time=db_entry.start_time,
            end_time=db_entry.end_time,
            exit_code=exit_code,
            duration=db_entry.duration,
            tags=db_entry.tags
        )


# For backward compatibility, alias the adapter as LogStorage so existing
# `from ... import LogStorage` call sites keep working without changes.
LogStorage = LogRunDatabaseAdapter