"""
Log management system for GCR Solver Manager.

This module handles log file storage, parsing, and integration with the database.
"""

import logging
import re
from pathlib import Path
from typing import Dict, List, Optional, Any, Tuple
from datetime import datetime
import hashlib

from ..database.simple_db import SimpleDatabase
from ..core.simple_config import get_config

logger = logging.getLogger(__name__)


class LogManager:
    """Manages log files and database integration.

    Wraps a ``SimpleDatabase`` instance for storing and querying build/test
    runs, owns the on-disk log directory layout (``<base>/builds`` and
    ``<base>/tests``), and provides heuristic text parsers that extract
    errors, warnings, norms, timings, and GPU details from raw log content.
    """
    
    def __init__(self, log_base_dir: str = "logs", db: Optional[SimpleDatabase] = None):
        """Initialize log manager with base directory and database.

        Args:
            log_base_dir: Directory under which log files live; created
                (with parents) if missing.
            db: Existing database handle. When ``None``, a new
                ``SimpleDatabase`` is opened at the configured
                ``database.path``.
        """
        self.log_base_dir = Path(log_base_dir)
        self.log_base_dir.mkdir(parents=True, exist_ok=True)
        
        # Build and test logs are kept in separate subdirectories.
        (self.log_base_dir / "builds").mkdir(exist_ok=True)
        (self.log_base_dir / "tests").mkdir(exist_ok=True)
        
        # Use config database path if no db instance provided.
        if db is None:
            config = get_config()
            db_path = config.get('database.path')
            self.db = SimpleDatabase(db_path)
        else:
            self.db = db
        logger.info(f"Log manager initialized with directory: {self.log_base_dir}")
    
    def store_build_log(self, solver: str, gpu: Optional[str], config: Dict[str, Any], 
                       log_content: str, tags: Optional[List[str]] = None) -> int:
        """Store build log and record in database.

        Returns:
            The run id assigned by the database.

        Raises:
            Exception: re-raised from the database layer after logging.
        """
        try:
            run_id = self.db.add_build(solver, gpu, config, log_content, tags)
            logger.info(f"Build log stored for solver={solver}, gpu={gpu}, run_id={run_id}")
            return run_id
        except Exception as e:
            logger.error(f"Failed to store build log: {e}")
            raise
    
    def store_test_log(self, solver: str, gpu: Optional[str], params: Dict[str, Any], 
                      log_content: str, tags: Optional[List[str]] = None) -> int:
        """Store test log and record in database.

        Returns:
            The run id assigned by the database.

        Raises:
            Exception: re-raised from the database layer after logging.
        """
        try:
            run_id = self.db.add_test(solver, gpu, params, log_content, tags)
            logger.info(f"Test log stored for solver={solver}, gpu={gpu}, run_id={run_id}")
            return run_id
        except Exception as e:
            logger.error(f"Failed to store test log: {e}")
            raise
    
    def get_log_content(self, run_id: int) -> Optional[str]:
        """Retrieve log content for a specific run.

        Returns ``None`` (after logging a warning/error) when the run is
        unknown, has no recorded log path, or the file is missing or
        unreadable.
        """
        run_data = self.db.get_by_id(run_id)
        if not run_data or not run_data.get('log_path'):
            logger.warning(f"No log path found for run {run_id}")
            return None
        
        log_path = Path(run_data['log_path'])
        if not log_path.exists():
            logger.warning(f"Log file not found: {log_path}")
            return None
        
        try:
            # Logs are assumed UTF-8; replace undecodable bytes instead of
            # failing on occasional binary garbage in tool output.
            return log_path.read_text(encoding='utf-8', errors='replace')
        except OSError as e:
            logger.error(f"Failed to read log file {log_path}: {e}")
            return None
    
    def search_logs(self, query: str, run_type: Optional[str] = None) -> List[Dict[str, Any]]:
        """Search logs by text query, optionally filtered by run type."""
        return self.db.search_runs(query, run_type)
    
    def get_recent_logs(self, n: int = 10, run_type: Optional[str] = None, 
                       solver: Optional[str] = None) -> List[Dict[str, Any]]:
        """Get the ``n`` most recent log entries, optionally filtered."""
        return self.db.get_latest(n, run_type, solver)
    
    @staticmethod
    def _safe_float(text: str) -> Optional[float]:
        r"""Parse *text* as a float, returning ``None`` if it is not numeric.

        The permissive regex class ``[\d\.e\-\+]+`` used by the analyzers can
        match strings such as ``...`` or a lone ``e`` that are not valid
        floats, so every conversion must be guarded.
        """
        try:
            return float(text)
        except ValueError:
            return None
    
    def analyze_build_log(self, log_content: str) -> Dict[str, Any]:
        """Analyze build log for errors, warnings, and success indicators.

        Returns:
            Dict with keys ``errors`` (capped at 10), ``warnings`` (capped at
            20), ``build_time``, ``status`` ('failed'/'success'/'unknown'),
            ``compiler_info``, and ``dependencies`` (capped at 15).
        """
        analysis: Dict[str, Any] = {
            'errors': [],
            'warnings': [],
            'build_time': None,
            'status': 'unknown',
            'compiler_info': {},
            'dependencies': []
        }
        
        # Hoisted out of the per-line loop: compiled/built once per call.
        time_re = re.compile(r'(\d+:\d+:\d+|\d+\.\d+|\d+ seconds?|\d+ minutes?)')
        error_markers = ('error:', 'fatal error', 'compilation failed')
        compilers = ('gcc', 'g++', 'nvcc', 'hipcc', 'clang')
        dep_markers = ('cmake', 'make', 'find_package', 'target_link_libraries')
        
        for line in log_content.splitlines():
            line_lower = line.lower()
            
            # Extract errors.
            if any(marker in line_lower for marker in error_markers):
                analysis['errors'].append(line.strip())
            
            # Extract warnings.
            elif 'warning:' in line_lower:
                analysis['warnings'].append(line.strip())
            
            # Extract build time (last matching line wins).
            elif 'elapsed time' in line_lower or 'build time' in line_lower:
                time_match = time_re.search(line)
                if time_match:
                    analysis['build_time'] = time_match.group(1)
            
            # Extract compiler information (version banner lines only).
            elif any(compiler in line_lower for compiler in compilers):
                if 'version' in line_lower:
                    analysis['compiler_info']['version_line'] = line.strip()
            
            # Extract dependency/build-system information.
            elif any(dep in line_lower for dep in dep_markers):
                analysis['dependencies'].append(line.strip())
        
        # Overall status: explicit errors (or a 'failed' marker anywhere)
        # dominate; otherwise look for success indicators.
        if analysis['errors'] or 'failed' in log_content.lower():
            analysis['status'] = 'failed'
        elif 'success' in log_content.lower() or 'finished' in log_content.lower():
            analysis['status'] = 'success'
        
        # Cap list sizes to keep the stored analysis compact.
        analysis['errors'] = analysis['errors'][:10]
        analysis['warnings'] = analysis['warnings'][:20]
        analysis['dependencies'] = analysis['dependencies'][:15]
        
        return analysis
    
    def analyze_test_log(self, log_content: str) -> Dict[str, Any]:
        """Analyze test log for convergence, norms, and performance data.

        Returns:
            Dict with keys ``norms``, ``convergence`` (converged flag,
            iteration count, final residual), ``performance`` (total/solver
            time, last 10 per-iteration times), ``errors`` (capped at 5),
            and ``gpu_info``.
        """
        analysis: Dict[str, Any] = {
            'norms': {},
            'convergence': {
                'converged': False,
                'iterations': None,
                'final_residual': None
            },
            'performance': {
                'total_time': None,
                'solver_time': None,
                'iteration_times': []
            },
            'errors': [],
            'gpu_info': {}
        }
        
        # Patterns compiled once per call instead of once per line.
        norm_patterns = [
            re.compile(r'(\w*norm\w*)\s*[:=]\s*([\d\.e\-\+]+)'),
            re.compile(r'residual\s*[:=]\s*([\d\.e\-\+]+)'),
            re.compile(r'final\s+(\w+)\s*[:=]\s*([\d\.e\-\+]+)')
        ]
        # (destination, pattern) pairs; 'elapsed' also feeds total_time.
        time_patterns = [
            ('total', re.compile(r'total\s+time\s*[:=]\s*([\d\.e\-\+]+)')),
            ('solver', re.compile(r'solver\s+time\s*[:=]\s*([\d\.e\-\+]+)')),
            ('iteration', re.compile(r'iteration\s+(\d+).*time\s*[:=]\s*([\d\.e\-\+]+)')),
            ('elapsed', re.compile(r'elapsed\s+time\s*[:=]\s*([\d\.e\-\+]+)'))
        ]
        iter_re = re.compile(r'(\d+)\s+iterations?')
        mem_re = re.compile(r'memory\s*[:=]\s*([\d\.]+)\s*(mb|gb|bytes?)')
        device_re = re.compile(r'device\s+(\d+)')
        gpu_terms = ('cuda', 'gpu', 'device', 'nccl', 'hip', 'rccl')
        error_terms = ('error', 'failed', 'exception')
        
        for line in log_content.splitlines():
            line_clean = line.strip()
            line_lower = line_clean.lower()
            
            # Extract norm/residual values. All patterns are tried on every
            # line, so later patterns may overwrite earlier keys.
            for pattern in norm_patterns:
                match = pattern.search(line_lower)
                if match:
                    if len(match.groups()) == 2:
                        key, raw = match.groups()
                    else:
                        key, raw = 'residual', match.group(1)
                    value = self._safe_float(raw)
                    if value is not None:
                        analysis['norms'][key] = value
            
            # Extract convergence information.
            if 'converged' in line_lower and 'failed' not in line_lower:
                analysis['convergence']['converged'] = True
                iter_match = iter_re.search(line_lower)
                if iter_match:
                    analysis['convergence']['iterations'] = int(iter_match.group(1))
            
            # Extract timing information; skip matches that are not valid
            # floats (the regex class is permissive) instead of raising.
            for kind, pattern in time_patterns:
                match = pattern.search(line_lower)
                if not match:
                    continue
                if kind == 'iteration':
                    iter_time = self._safe_float(match.group(2))
                    if iter_time is not None:
                        analysis['performance']['iteration_times'].append({
                            'iteration': int(match.group(1)),
                            'time': iter_time
                        })
                else:
                    value = self._safe_float(match.group(1))
                    if value is not None:
                        if kind == 'solver':
                            analysis['performance']['solver_time'] = value
                        else:
                            # 'total' and 'elapsed' both record total_time.
                            analysis['performance']['total_time'] = value
            
            # Extract GPU information.
            if any(term in line_lower for term in gpu_terms):
                if 'memory' in line_lower:
                    mem_match = mem_re.search(line_lower)
                    if mem_match:
                        analysis['gpu_info']['memory'] = f"{mem_match.group(1)} {mem_match.group(2)}"
                
                if 'device' in line_lower and 'id' in line_lower:
                    device_match = device_re.search(line_lower)
                    if device_match:
                        analysis['gpu_info']['device_id'] = int(device_match.group(1))
            
            # Collect lines that look like errors (substring heuristic).
            if any(term in line_lower for term in error_terms):
                analysis['errors'].append(line_clean)
        
        # final_residual: prefer an explicit residual; otherwise fall back to
        # the alphabetically-last norm key (a heuristic, not chronological).
        if 'residual' in analysis['norms']:
            analysis['convergence']['final_residual'] = analysis['norms']['residual']
        elif analysis['norms']:
            last_norm_key = sorted(analysis['norms'])[-1]
            analysis['convergence']['final_residual'] = analysis['norms'][last_norm_key]
        
        # Limit error list.
        analysis['errors'] = analysis['errors'][:5]
        
        # Limit iteration times to the last 10.
        analysis['performance']['iteration_times'] = analysis['performance']['iteration_times'][-10:]
        
        return analysis
    
    def compare_test_runs(self, run_id1: int, run_id2: int) -> Optional[Dict[str, Any]]:
        """Compare two test runs and their recorded norms."""
        return self.db.compare_norms(run_id1, run_id2)
    
    def get_log_statistics(self) -> Dict[str, Any]:
        """Get combined database and filesystem statistics for stored logs."""
        db_stats = self.db.get_statistics()
        
        # Add file system statistics from the on-disk log directories.
        builds_dir = self.log_base_dir / "builds"
        tests_dir = self.log_base_dir / "tests"
        
        build_files = list(builds_dir.glob("*.log")) if builds_dir.exists() else []
        test_files = list(tests_dir.glob("*.log")) if tests_dir.exists() else []
        
        total_size = sum(f.stat().st_size for f in build_files + test_files)
        
        file_stats = {
            'build_log_files': len(build_files),
            'test_log_files': len(test_files),
            'total_log_files': len(build_files) + len(test_files),
            'total_size_bytes': total_size,
            'total_size_mb': round(total_size / (1024 * 1024), 2),
            'log_directory': str(self.log_base_dir)
        }
        
        return {
            'database': db_stats,
            'files': file_stats
        }
    
    def cleanup_old_logs(self, days: int = 90) -> Dict[str, int]:
        """Clean up log files and database entries older than ``days`` days.

        Returns:
            Counts of deleted database entries and deleted log files.
        """
        # Cleanup database entries first.
        db_deleted = self.db.cleanup_old_runs(days)
        
        # Then delete files whose mtime predates the cutoff.
        cutoff_time = datetime.now().timestamp() - (days * 24 * 3600)
        file_deleted = 0
        
        for log_dir in [self.log_base_dir / "builds", self.log_base_dir / "tests"]:
            if log_dir.exists():
                for log_file in log_dir.glob("*.log"):
                    if log_file.stat().st_mtime < cutoff_time:
                        try:
                            log_file.unlink()
                            file_deleted += 1
                        except Exception as e:
                            # Best effort: a single undeletable file should
                            # not abort the whole cleanup pass.
                            logger.warning(f"Failed to delete {log_file}: {e}")
        
        logger.info(f"Cleanup completed: {db_deleted} database entries, {file_deleted} files")
        return {
            'database_entries_deleted': db_deleted,
            'log_files_deleted': file_deleted
        }