"""
Batch operations for GCR Solver Manager.

This module handles batch execution of multiple builds and tests.
"""

import yaml
import json
import logging
import concurrent.futures
from pathlib import Path
from typing import Dict, List, Optional, Any, Tuple
from datetime import datetime

from ..core.simple_config import get_config
from ..test.simple_runner import SimpleTestRunner
from ..logs.manager import LogManager

logger = logging.getLogger(__name__)


class BatchRunner:
    """Handles batch execution of multiple solver configurations.

    A *job* is a dict with keys:
        operation: 'test' ('build' is no longer supported — see __init__)
        solver:    solver type name
        gpu:       GPU type, or None for CPU execution
        params:    parameters forwarded to the test runner (may include 'tags')
    """
    
    def __init__(self):
        """Initialize batch runner."""
        self.config = get_config()
        # Build manager removed - using binary execution only
        self.test_runner = SimpleTestRunner()
        self.log_manager = LogManager()
    
    def run_batch_from_config(self, config_file: str, parallel: bool = False, 
                             max_workers: int = 4) -> Dict[str, Any]:
        """
        Run batch operations from configuration file.
        
        Args:
            config_file: Path to batch configuration file (.yaml/.yml/.json)
            parallel: Whether to run jobs in parallel
            max_workers: Maximum number of parallel workers
            
        Returns:
            Batch execution results; on failure a dict with
            'success': False and an 'error' message.
        """
        try:
            config_path = Path(config_file)
            if not config_path.exists():
                raise FileNotFoundError(f"Batch configuration file not found: {config_file}")
            
            # Load configuration; the parser is selected by file extension.
            with open(config_path, 'r') as f:
                if config_path.suffix.lower() in ['.yaml', '.yml']:
                    batch_config = yaml.safe_load(f)
                elif config_path.suffix.lower() == '.json':
                    batch_config = json.load(f)
                else:
                    raise ValueError(f"Unsupported config format: {config_path.suffix}")
            
            return self._execute_batch(batch_config, parallel, max_workers)
            
        except Exception as e:
            logger.error(f"Batch execution failed: {e}")
            return {'success': False, 'error': str(e)}
    
    def run_batch_matrix(self, solvers: List[str], gpu_types: List[str], 
                        operation: str = 'test', params: Optional[Dict[str, Any]] = None,
                        parallel: bool = False, max_workers: int = 4) -> Dict[str, Any]:
        """
        Run batch matrix of solver/GPU combinations.
        
        Args:
            solvers: List of solver types
            gpu_types: List of GPU types ('cpu' maps to gpu=None)
            operation: Operation type ('build' or 'test')
            params: Common parameters for all runs
            parallel: Whether to run jobs in parallel
            max_workers: Maximum number of parallel workers
            
        Returns:
            Batch execution results
        """
        params = params or {}
        
        # Generate the full cartesian product of solver x GPU combinations.
        # NOTE: params.copy() is shallow, so nested values (e.g. the 'tags'
        # list) are shared between jobs; _execute_single_job must not mutate
        # them in place.
        combinations = []
        for solver in solvers:
            for gpu in gpu_types:
                combo = {
                    'solver': solver,
                    'gpu': gpu if gpu != 'cpu' else None,
                    'operation': operation,
                    'params': params.copy()
                }
                combinations.append(combo)
        
        batch_config = {
            'name': f'Matrix {operation} - {len(combinations)} combinations',
            'jobs': combinations
        }
        
        return self._execute_batch(batch_config, parallel, max_workers)
    
    def _execute_batch(self, batch_config: Dict[str, Any], parallel: bool, 
                      max_workers: int) -> Dict[str, Any]:
        """Execute batch configuration and return an aggregate result dict."""
        jobs = batch_config.get('jobs', [])
        if not jobs:
            return {'success': False, 'error': 'No jobs defined in batch configuration'}
        
        # Aggregate result skeleton; per-job results accumulate in 'jobs'.
        results = {
            'batch_name': batch_config.get('name', 'Unnamed Batch'),
            'start_time': datetime.now().isoformat(),
            'total_jobs': len(jobs),
            'completed_jobs': 0,
            'successful_jobs': 0,
            'failed_jobs': 0,
            'jobs': [],
            'success': False
        }
        
        logger.info(f"Starting batch execution: {results['batch_name']} ({results['total_jobs']} jobs)")
        
        try:
            if parallel and max_workers > 1:
                results = self._execute_parallel(jobs, results, max_workers)
            else:
                results = self._execute_sequential(jobs, results)
            
            results['end_time'] = datetime.now().isoformat()
            # The batch as a whole succeeds only if every job succeeded.
            results['success'] = results['failed_jobs'] == 0
            
            logger.info(f"Batch completed: {results['successful_jobs']} successful, "
                       f"{results['failed_jobs']} failed")
            
        except Exception as e:
            logger.error(f"Batch execution error: {e}")
            results['error'] = str(e)
            results['end_time'] = datetime.now().isoformat()
        
        return results
    
    def _execute_sequential(self, jobs: List[Dict[str, Any]], 
                           results: Dict[str, Any]) -> Dict[str, Any]:
        """Execute jobs one at a time, updating counters in *results*."""
        for i, job in enumerate(jobs, 1):
            logger.info(f"Executing job {i}/{len(jobs)}: {self._job_description(job)}")
            
            job_result = self._execute_single_job(job)
            results['jobs'].append(job_result)
            results['completed_jobs'] += 1
            
            if job_result['success']:
                results['successful_jobs'] += 1
            else:
                results['failed_jobs'] += 1
        
        return results
    
    def _execute_parallel(self, jobs: List[Dict[str, Any]], results: Dict[str, Any], 
                         max_workers: int) -> Dict[str, Any]:
        """Execute jobs concurrently via a thread pool, updating *results*."""
        with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
            # Submit all jobs up front; map futures back to their job spec.
            future_to_job = {
                executor.submit(self._execute_single_job, job): job 
                for job in jobs
            }
            
            # Collect results as they complete (arbitrary order, so per-job
            # results in results['jobs'] are in completion order).
            for future in concurrent.futures.as_completed(future_to_job):
                job = future_to_job[future]
                try:
                    job_result = future.result()
                    results['jobs'].append(job_result)
                    results['completed_jobs'] += 1
                    
                    if job_result['success']:
                        results['successful_jobs'] += 1
                    else:
                        results['failed_jobs'] += 1
                    
                    logger.info(f"Completed job ({results['completed_jobs']}/{len(jobs)}): "
                               f"{self._job_description(job)} - "
                               f"{'SUCCESS' if job_result['success'] else 'FAILED'}")
                
                except Exception as e:
                    # _execute_single_job normally swallows its own errors;
                    # this guards against unexpected worker-level failures.
                    error_result = {
                        'job': job,
                        'success': False,
                        'error': str(e),
                        'run_id': None
                    }
                    results['jobs'].append(error_result)
                    results['completed_jobs'] += 1
                    results['failed_jobs'] += 1
                    
                    logger.error(f"Job failed with exception: {self._job_description(job)} - {e}")
        
        return results
    
    def _execute_single_job(self, job: Dict[str, Any]) -> Dict[str, Any]:
        """Execute a single job.

        Never raises: any failure is reported via the returned dict's
        'success'/'error' keys so batch accounting stays consistent.
        """
        try:
            operation = job.get('operation', 'test')
            solver = job['solver']
            gpu = job.get('gpu')
            params = job.get('params', {})
            
            # Add batch tag on a COPY of the tags list: run_batch_matrix
            # shallow-copies params, so the tags list may be shared across
            # jobs (and with the caller) — appending in place would both
            # mutate the caller's list and accumulate 'batch' once per job.
            tags = params.get('tags', [])
            if isinstance(tags, list):
                tags = tags + ['batch']
            
            if operation == 'build':
                # The build manager was removed (binary execution only);
                # fail explicitly instead of hitting AttributeError on the
                # nonexistent self.build_manager.
                raise ValueError(f"Build operation is no longer supported: {solver}")
            elif operation == 'test':
                success, run_id = self.test_runner.run_test(
                    solver=solver,
                    gpu=gpu,
                    params=params,
                    tags=tags
                )
            else:
                raise ValueError(f"Unknown operation: {operation}")
            
            return {
                'job': job,
                'success': success,
                'run_id': run_id,
                'operation': operation
            }
            
        except Exception as e:
            return {
                'job': job,
                'success': False,
                'error': str(e),
                'run_id': None
            }
    
    def _job_description(self, job: Dict[str, Any]) -> str:
        """Generate a short human-readable description for a job."""
        operation = job.get('operation', 'test')
        solver = job.get('solver', 'unknown')
        gpu = job.get('gpu', 'cpu')
        return f"{operation} {solver} {gpu}"
    
    def create_batch_config_template(self, output_file: str) -> bool:
        """Create a batch configuration template file.

        The output format is YAML for .yaml/.yml extensions, JSON otherwise.

        Returns:
            True on success, False if the file could not be written.
        """
        try:
            template = {
                'name': 'Example Batch Configuration',
                'description': 'Template for batch operations',
                'jobs': [
                    {
                        'operation': 'build',
                        'solver': 'gcr',
                        'gpu': 'cuda',
                        'params': {
                            'debug': True,
                            'parallel': 4,
                            'tags': ['template', 'build']
                        }
                    },
                    {
                        'operation': 'test',
                        'solver': 'gcr',
                        'gpu': 'cuda',
                        'params': {
                            'nproc': 4,
                            'resolution': 1.0,
                            'debug': True,
                            'tags': ['template', 'test']
                        }
                    },
                    {
                        'operation': 'test',
                        'solver': 'gmres',
                        'gpu': 'hip',
                        'params': {
                            'nproc': 8,
                            'resolution': 0.5,
                            'maxit': 50,
                            'tags': ['template', 'performance']
                        }
                    }
                ]
            }
            
            output_path = Path(output_file)
            output_path.parent.mkdir(parents=True, exist_ok=True)
            
            if output_path.suffix.lower() in ['.yaml', '.yml']:
                with open(output_path, 'w') as f:
                    yaml.dump(template, f, default_flow_style=False, sort_keys=False)
            else:
                with open(output_path, 'w') as f:
                    json.dump(template, f, indent=2)
            
            logger.info(f"Batch configuration template created: {output_file}")
            return True
            
        except Exception as e:
            logger.error(f"Failed to create batch template: {e}")
            return False
    
    def get_batch_history(self, days: int = 7) -> List[Dict[str, Any]]:
        """Get history of batch runs from database.

        Args:
            days: Maximum number of daily batch summaries to return.

        Returns:
            Per-day summaries of 'batch'-tagged runs, newest first.
        """
        recent_runs = self.log_manager.db.get_latest(500)  # Get many runs
        
        # Group batch-tagged runs by calendar day to identify batch runs.
        # 'tags' may be absent or stored as None, hence the `or []` guard.
        batch_groups: Dict[str, List[Dict[str, Any]]] = {}
        for run in recent_runs:
            if 'batch' in (run.get('tags') or []):
                created_date = run['created_at'][:10]  # YYYY-MM-DD
                batch_groups.setdefault(created_date, []).append(run)
        
        # Convert each day's group to a batch summary.
        batch_runs = []
        for date, runs in batch_groups.items():
            successful = sum(1 for r in runs if r['status'] == 'success')
            failed = sum(1 for r in runs if r['status'] == 'failed')
            
            batch_runs.append({
                'date': date,
                'total_jobs': len(runs),
                'successful_jobs': successful,
                'failed_jobs': failed,
                'runs': runs
            })
        
        # Sort by date, newest first
        batch_runs.sort(key=lambda x: x['date'], reverse=True)
        return batch_runs[:days]  # Limit to requested days