#!/usr/bin/env python3
"""
GSI stdout Analysis Tool
Automatically extracts and analyzes key information from GSI stdout files
"""

import re
import sys
from datetime import datetime
from collections import defaultdict
from typing import Dict, List, Tuple, Optional
import json
import os
import copy

class GSIAnalyzer:
    """Extract structured information from a GSI (Gridpoint Statistical
    Interpolation) stdout log.

    The log is read once into ``self.content``; each ``extract_*`` pass
    scans those lines with regular expressions and records its findings
    under a topic key of ``self.analysis``.  The analysis dict holds only
    plain dicts/lists/scalars so it can be passed directly to ``json.dump``.
    """

    def __init__(self, stdout_file: str):
        # Path of the GSI stdout log to analyze.
        self.stdout_file = stdout_file
        # Raw log lines; populated by read_file().
        self.content: List[str] = []
        # Results of every extraction pass, keyed by topic.
        self.analysis = {
            'execution_status': {},
            'configuration': {},
            'grid_info': {},
            'observations': {},
            'minimization': {},
            'increments': {},
            'performance': {},
            'issues': [],
            'ensemble_info': {},
            'background_fields': {},
            'radiance_info': {}
        }

    def read_file(self):
        """Read the stdout file into ``self.content``.

        Exits the process with status 1 if the file does not exist.
        """
        try:
            # errors="replace" keeps a log containing stray non-UTF-8
            # bytes readable instead of aborting with UnicodeDecodeError.
            with open(self.stdout_file, 'r', errors='replace') as f:
                self.content = f.readlines()
        except FileNotFoundError:
            print(f"Error: File {self.stdout_file} not found")
            sys.exit(1)

    def analyze(self):
        """Run every extraction pass over the loaded log lines."""
        self.extract_execution_status()
        self.extract_configuration()
        self.extract_grid_info()
        self.extract_ensemble_info()
        self.extract_background_fields()
        self.extract_observations()
        self.extract_radiance_info()
        self.extract_minimization()
        self.extract_increments()
        self.extract_performance()
        self.identify_issues()

    def extract_execution_status(self):
        """Extract start/end banners and the analysis valid time."""
        for line in self.content:
            # Start-time banner.
            if "STARTING DATE-TIME" in line:
                self.analysis['execution_status']['start_time'] = line.strip()

            # End-time banner; its presence implies the run completed.
            if "ENDING DATE-TIME" in line:
                self.analysis['execution_status']['end_time'] = line.strip()
                self.analysis['execution_status']['completed'] = True

            # Analysis valid time: "... analysis date,minutes YYYY M D H M".
            if "analysis date,minutes" in line:
                match = re.search(r'(\d{4})\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)', line)
                if match:
                    year, month, day, hour, minute = match.groups()
                    # The groups are strings; ":0>2" zero-pads so "6" -> "06".
                    self.analysis['execution_status']['analysis_time'] = f"{year}-{month:0>2}-{day:0>2} {hour:0>2}:{minute:0>2} UTC"

    def extract_configuration(self):
        """Extract run configuration: MPI layout, time window, model core,
        variational mode, hybrid-ensemble hints, cloud analysis flag."""
        config = self.analysis['configuration']

        for line in self.content:
            # MPI configuration: mype_io is a 0-based rank; assumes the
            # I/O task is the last rank, hence tasks = rank + 1
            # (TODO confirm against the GSI build in use).
            if "INIT_IO:" in line and "mype_io=" in line:
                match = re.search(r'mype_io=\s*(\d+)', line)
                if match:
                    config['mpi_tasks'] = int(match.group(1)) + 1

            # Assimilation time window in hours.
            if "nhr_assimilation=" in line:
                match = re.search(r'nhr_assimilation=\s*(\d+)', line)
                if match:
                    config['time_window_hours'] = int(match.group(1))

            # Number of observation time bins (FGAT/4D binning).
            if "nobs_bins =" in line:
                match = re.search(r'nobs_bins =\s*(\d+)', line)
                if match:
                    config['obs_bins'] = int(match.group(1))

            # Model core flags ("=T" is the Fortran namelist-echo form).
            if "wrf_nmm_regional=" in line:
                if "wrf_nmm_regional=T" in line:
                    config['model_type'] = 'WRF-NMM'
                else:
                    config['model_type'] = 'WRF-ARW'
            elif "wrf_mass_regional=" in line:
                if "wrf_mass_regional=T" in line:
                    config['model_type'] = 'WRF-ARW'

            # Variational mode; matches the exact "l4dvar= F" spacing the
            # namelist echo emits.
            if "l4dvar=" in line:
                if "l4dvar= F" in line:
                    config['variational_mode'] = '3D-Var'
                else:
                    config['variational_mode'] = '4D-Var'

            # Hybrid-ensemble indicators: record member count when present.
            if "n_ens=" in line or "ensemble_path=" in line:
                match = re.search(r'n_ens=\s*(\d+)', line)
                if match:
                    config['ensemble_members'] = int(match.group(1))
                    config['hybrid_mode'] = True

            # Cloud analysis flag: accept both "=T" and "=.true." forms.
            if "l_cloud_analysis=" in line:
                config['cloud_analysis'] = "=T" in line or "=.true." in line.lower()

    def extract_grid_info(self):
        """Extract grid dimensions, spacing, domain bounds and model top."""
        grid = self.analysis['grid_info']

        for line in self.content:
            # Grid dimensions: nx (lon), ny (lat), nz (vertical levels).
            if "nlon,lat,sig_regional=" in line:
                match = re.search(r'nlon,lat,sig_regional=\s*(\d+)\s+(\d+)\s+(\d+)', line)
                if match:
                    grid['nx'] = int(match.group(1))
                    grid['ny'] = int(match.group(2))
                    grid['nz'] = int(match.group(3))

            # Grid spacing: GSI logs the reciprocal, so 1/rdx is dx.
            if "1/rdx=" in line:
                match = re.search(r'1/rdx=\s*([\d.]+)', line)
                if match:
                    grid['dx_meters'] = float(match.group(1))

            if "1/rdy=" in line:
                match = re.search(r'1/rdy=\s*([\d.]+)', line)
                if match:
                    grid['dy_meters'] = float(match.group(1))

            # Domain bounds; skip the single-point "xlat(1,1)" debug lines.
            if "max,min XLAT" in line and "xlat(1,1)" not in line:
                match = re.search(r'max,min XLAT.*=\s*([\d.-]+)\s+([\d.-]+)', line)
                if match:
                    grid['lat_max'] = float(match.group(1))
                    grid['lat_min'] = float(match.group(2))

            if "max,min XLONG" in line and "xlong(1,1)" not in line:
                match = re.search(r'max,min XLONG.*=\s*([\d.-]+)\s+([\d.-]+)', line)
                if match:
                    grid['lon_max'] = float(match.group(1))
                    grid['lon_min'] = float(match.group(2))

            # Model top pressure (Pa, per the key name — TODO confirm units).
            if "p_top=" in line:
                match = re.search(r'p_top=\s*([\d.]+)', line)
                if match:
                    grid['p_top_pa'] = float(match.group(1))

    def extract_ensemble_info(self):
        """Extract hybrid-ensemble usage, blending weight and member count."""
        ensemble = self.analysis['ensemble_info']
        ensemble['uses_ensemble'] = False
        ensemble['ensemble_weight'] = 0.0

        for line in self.content:
            # Any mention of the ensemble inputs implies ensemble usage.
            if "use_gfs_ens" in line or "ensemble_path" in line:
                ensemble['uses_ensemble'] = True

            # Ensemble/static blending weight.  Anchor the value to the
            # keyword: a bare r'([\d.]+)' would capture the "1" embedded
            # in the token "beta1_inv" instead of the number after "=".
            if "ensemble weight" in line or "beta_s=" in line or "beta1_inv=" in line:
                match = re.search(
                    r'(?:ensemble weight|beta_s=|beta1_inv=)\s*([\d.]+)', line)
                if match:
                    ensemble['ensemble_weight'] = float(match.group(1))

            # Member count; n_ens=0 explicitly means "no ensemble".
            if "n_ens=" in line:
                match = re.search(r'n_ens=\s*(\d+)', line)
                if match:
                    ensemble['n_members'] = int(match.group(1))
                    ensemble['uses_ensemble'] = int(match.group(1)) > 0

    def extract_background_fields(self):
        """Extract per-field min/max ranges of the background state.

        The "k,max,min,mid=" lines appear once per vertical level; ranges
        are merged across levels into a single min/max per field.
        """
        bg = self.analysis['background_fields']

        # Captures: field name, then max, then min (that log order).
        range_pattern = re.compile(r'(\w+)\s+k,max,min,mid=\s*\d+\s+([\d.-]+)\s+([\d.-]+)')

        for line in self.content:
            match = range_pattern.search(line)
            if match:
                field_name = match.group(1)
                max_val = float(match.group(2))
                min_val = float(match.group(3))

                if field_name not in bg:
                    bg[field_name] = {'min': min_val, 'max': max_val}
                else:
                    # Widen the running range with this level's values.
                    bg[field_name]['min'] = min(bg[field_name]['min'], min_val)
                    bg[field_name]['max'] = max(bg[field_name]['max'], max_val)

            # Surface pressure range (logged min-first, unlike the above).
            if "PREINIT_REG_GLOB_LL: min,max ps_in" in line:
                match = re.search(r'min,max ps_in\s*=\s*([\d.]+)\s+([\d.]+)', line)
                if match:
                    bg['ps'] = {
                        'min': float(match.group(1)),
                        'max': float(match.group(2))
                    }

    def extract_observations(self):
        """Extract per-type observation read/keep counts and thinning mesh."""
        obs = self.analysis['observations']
        obs['types'] = {}
        obs['total_read'] = 0
        obs['total_used'] = 0
        obs['thinning'] = {}

        # "READ_<reader>: file=<f> type=<t> ... nread= N ... nkeep= M"
        obs_pattern = re.compile(r'READ_\w+\s*:\s*file=\w+\s+type=(\w+)\s+.*nread=\s*(\d+).*nkeep=\s*(\d+)')

        # Thinning mesh size (km) per observation type.
        thin_pattern = re.compile(r'type=(\w+).*rmesh=\s*([\d.]+)')

        for line in self.content:
            # Observation counts.
            match = obs_pattern.search(line)
            if match:
                obs_type = match.group(1)
                nread = int(match.group(2))
                nkeep = int(match.group(3))

                obs['types'][obs_type] = {
                    'read': nread,
                    'kept': nkeep,
                    # Guard against nread == 0 to avoid ZeroDivisionError.
                    'usage_rate': nkeep / nread if nread > 0 else 0
                }
                obs['total_read'] += nread
                obs['total_used'] += nkeep

            # Thinning information.
            match = thin_pattern.search(line)
            if match:
                obs_type = match.group(1)
                mesh_km = float(match.group(2))
                obs['thinning'][obs_type] = mesh_km

        if obs['total_read'] > 0:
            obs['overall_usage_rate'] = obs['total_used'] / obs['total_read']

    def extract_radiance_info(self):
        """Extract satellite radiance sensors and channel usage counts."""
        rad = self.analysis['radiance_info']
        rad['satellites'] = []
        rad['total_channels'] = 0
        rad['used_channels'] = 0
        rad['has_radiances'] = False

        for line in self.content:
            # Any radiance diagnostic/setup output implies radiances exist.
            if "READ_RADIAG" in line or "setuprad:" in line:
                rad['has_radiances'] = True

            # Sensor/platform pairs, e.g. "sensor/platform amsua/n19".
            if "sensor/platform" in line:
                match = re.search(r'sensor/platform\s+(\w+)/(\w+)', line)
                if match:
                    sensor = match.group(1)
                    platform = match.group(2)
                    rad['satellites'].append(f"{sensor}_{platform}")

            # Channel usage.  Match "use=1" with a word boundary so that
            # "use=10", "use=11", ... are not miscounted as active.
            if "channel" in line and "use=" in line:
                rad['total_channels'] += 1
                if re.search(r'use=\s*1\b', line):
                    rad['used_channels'] += 1

    def extract_minimization(self):
        """Extract cost-function tables per outer loop and gradient norms."""
        minim = self.analysis['minimization']
        minim['outer_loops'] = []
        current_outer = None

        for line in self.content:
            # Outer-loop header: "Begin J table inner/outer loop <i> <o>".
            if "Begin J table inner/outer loop" in line:
                match = re.search(r'loop\s+(\d+)\s+(\d+)', line)
                if match:
                    current_outer = {
                        'inner': int(match.group(1)),
                        'outer': int(match.group(2)),
                        'cost_function': {}
                    }

            # Total cost "J Global" closes the current table.
            if current_outer and "J Global" in line:
                match = re.search(r'J Global\s+([\d.E+-]+)', line)
                if match:
                    current_outer['cost_function']['total'] = float(match.group(1))
                    minim['outer_loops'].append(current_outer)
                    current_outer = None

            # Individual J components inside an open table.
            if current_outer:
                for term in ['background', 'surface pressure', 'temperature', 'wind', 'moisture', 'ozone', 'sst']:
                    if term in line and not any(x in line for x in ['End J table', 'Begin J table']):
                        # Last numeric token on the line is the term value.
                        match = re.search(r'([\d.E+-]+)\s*$', line)
                        if match:
                            try:
                                value = float(match.group(1))
                                current_outer['cost_function'][term] = value
                            except ValueError:
                                # Trailing token was not a number (e.g. "E-").
                                pass

            # PCGSOI gradient norms; the last entry is the final norm.
            if "pcgsoi: gnorm" in line:
                match = re.search(r'gnorm.*=\s*([\d.E+-]+)', line)
                if match:
                    if 'convergence' not in minim:
                        minim['convergence'] = []
                    minim['convergence'].append(float(match.group(1)))

    def extract_increments(self):
        """Extract mean/min/max analysis increments per variable."""
        inc = self.analysis['increments']

        # "increment <var> <mean> <min> <max>" (scientific notation ok).
        inc_pattern = re.compile(r'increment\s+(\w+)\s+([\d.E+-]+)\s+([\d.E+-]+)\s+([\d.E+-]+)')

        for line in self.content:
            match = inc_pattern.search(line)
            if match:
                var_name = match.group(1)
                inc[var_name] = {
                    'mean': float(match.group(2)),
                    'min': float(match.group(3)),
                    'max': float(match.group(4))
                }

    def extract_performance(self):
        """Extract wall/CPU times, peak memory, and derived CPU efficiency."""
        perf = self.analysis['performance']

        for line in self.content:
            # Wall time.
            if "The total amount of wall time" in line:
                match = re.search(r'=\s*([\d.]+)', line)
                if match:
                    perf['wall_time_seconds'] = float(match.group(1))

            # User-mode CPU time.
            if "time in user mode" in line:
                match = re.search(r'=\s*([\d.]+)', line)
                if match:
                    perf['user_time_seconds'] = float(match.group(1))

            # System-mode CPU time.
            if "time in sys mode" in line:
                match = re.search(r'=\s*([\d.]+)', line)
                if match:
                    perf['sys_time_seconds'] = float(match.group(1))

            # Peak resident memory (reported in KB by the scheduler).
            if "maximum resident set size" in line:
                match = re.search(r'=\s*(\d+)', line)
                if match:
                    perf['max_memory_kb'] = int(match.group(1))
                    perf['max_memory_mb'] = perf['max_memory_kb'] / 1024

        # Derived efficiency: (user + sys) / wall.  Guard against a zero
        # wall time so a truncated log cannot raise ZeroDivisionError.
        if ('wall_time_seconds' in perf and 'user_time_seconds' in perf
                and 'sys_time_seconds' in perf and perf['wall_time_seconds'] > 0):
            cpu_time = perf['user_time_seconds'] + perf['sys_time_seconds']
            perf['efficiency_percent'] = (cpu_time / perf['wall_time_seconds']) * 100

    def identify_issues(self):
        """Collect warnings, errors, missing-data notes and bad-obs counts."""
        issues = self.analysis['issues']

        for line in self.content:
            # Warnings ("***WARNING***" is subsumed by the plain check).
            if "WARNING" in line:
                issues.append({'type': 'warning', 'message': line.strip()})

            # Errors ("***ERROR***" is likewise subsumed).
            if "ERROR" in line:
                issues.append({'type': 'error', 'message': line.strip()})

            # Missing input data.
            if "not available" in line or "no profiles to process" in line:
                issues.append({'type': 'missing_data', 'message': line.strip()})

            # Observations rejected for a bad type index (ikx).
            if "bad ikx" in line:
                match = re.search(r'num_bad_ikx.*=\s*(\d+)', line)
                if match:
                    issues.append({
                        'type': 'bad_observations',
                        'count': int(match.group(1)),
                        'message': line.strip()
                    })

    def generate_report(self) -> str:
        """Render all extracted results as a human-readable text report."""
        report = []
        report.append("=" * 80)
        report.append("GSI STDOUT ANALYSIS REPORT")
        report.append("=" * 80)
        report.append("")

        # Execution Status
        report.append("EXECUTION STATUS:")
        status = self.analysis['execution_status']
        if status.get('completed'):
            report.append("  Status: COMPLETED SUCCESSFULLY")
        else:
            report.append("  Status: INCOMPLETE OR FAILED")
        if 'analysis_time' in status:
            report.append(f"  Analysis Time: {status['analysis_time']}")
        report.append("")

        # Configuration
        report.append("CONFIGURATION:")
        config = self.analysis['configuration']
        for key, value in config.items():
            if key != 'hybrid_mode':  # Handled separately below
                report.append(f"  {key}: {value}")

        # Hybrid mode: combine the namelist flag with ensemble-usage hints.
        ensemble = self.analysis['ensemble_info']
        if config.get('hybrid_mode') or ensemble.get('uses_ensemble'):
            report.append("  hybrid_mode: ENABLED")
            if ensemble.get('n_members'):
                report.append(f"  ensemble_members: {ensemble['n_members']}")
        else:
            report.append("  hybrid_mode: DISABLED (Pure 3D/4D-Var)")
        report.append("")

        # Grid Information
        report.append("GRID INFORMATION:")
        grid = self.analysis['grid_info']
        if 'nx' in grid:
            report.append(f"  Grid Dimensions: {grid['nx']} x {grid['ny']} x {grid['nz']}")
        if 'dx_meters' in grid:
            report.append(f"  Grid Spacing: {grid['dx_meters']/1000:.1f} km")
        if 'lat_min' in grid:
            report.append(f"  Domain: {grid['lat_min']:.1f}°N to {grid['lat_max']:.1f}°N, "
                         f"{grid['lon_min']:.1f}°E to {grid['lon_max']:.1f}°E")
        report.append("")

        # Background Fields
        if self.analysis['background_fields']:
            report.append("BACKGROUND FIELD RANGES:")
            for field, stats in sorted(self.analysis['background_fields'].items()):
                unit = self._get_field_unit(field)
                report.append(f"  {field:10s}: {stats['min']:12.2f} to {stats['max']:12.2f} {unit}")
            report.append("")

        # Observations
        report.append("OBSERVATIONS:")
        obs = self.analysis['observations']
        if obs['types']:
            report.append(f"  Total Read: {obs['total_read']:,}")
            report.append(f"  Total Used: {obs['total_used']:,}")
            report.append(f"  Overall Usage Rate: {obs.get('overall_usage_rate', 0)*100:.1f}%")
            report.append("  By Type:")
            for obs_type, stats in sorted(obs['types'].items()):
                report.append(f"    {obs_type:10s}: {stats['kept']:6,} used of {stats['read']:6,} "
                            f"({stats['usage_rate']*100:5.1f}%)")

            # Thinning information
            if obs['thinning']:
                report.append("  Thinning Mesh Sizes:")
                for obs_type, mesh in sorted(obs['thinning'].items()):
                    report.append(f"    {obs_type:10s}: {mesh:.0f} km")
        report.append("")

        # Radiance Information
        rad = self.analysis['radiance_info']
        if rad['has_radiances']:
            report.append("SATELLITE RADIANCES:")
            report.append(f"  Status: CONFIGURED")
            report.append(f"  Sensors: {len(rad['satellites'])}")
            if rad['total_channels'] > 0:
                report.append(f"  Channels: {rad['used_channels']} used of {rad['total_channels']} "
                            f"({rad['used_channels']/rad['total_channels']*100:.1f}%)")
        else:
            report.append("SATELLITE RADIANCES:")
            report.append("  Status: NOT USED")
        report.append("")

        # Minimization
        report.append("MINIMIZATION:")
        minim = self.analysis['minimization']
        if minim['outer_loops']:
            for loop in minim['outer_loops']:
                report.append(f"  Outer Loop {loop['outer']}:")
                report.append(f"    Total Cost Function J: {loop['cost_function'].get('total', 0):.1f}")
                for term, value in sorted(loop['cost_function'].items()):
                    if term != 'total':
                        report.append(f"    {term}: {value:.1f}")
        if 'convergence' in minim and minim['convergence']:
            report.append(f"  Final Gradient Norm: {minim['convergence'][-1]:.2e}")
        report.append("")

        # Analysis Increments
        report.append("ANALYSIS INCREMENTS:")
        inc = self.analysis['increments']
        if inc:
            report.append("  Variable      Mean         Min          Max")
            report.append("  " + "-" * 45)
            for var, stats in sorted(inc.items()):
                report.append(f"  {var:10s} {stats['mean']:10.2e} {stats['min']:10.2e} {stats['max']:10.2e}")
        report.append("")

        # Performance
        report.append("PERFORMANCE:")
        perf = self.analysis['performance']
        if 'wall_time_seconds' in perf:
            report.append(f"  Wall Time: {perf['wall_time_seconds']:.1f} seconds")
        if 'efficiency_percent' in perf:
            report.append(f"  CPU Efficiency: {perf['efficiency_percent']:.1f}%")
        if 'max_memory_mb' in perf:
            report.append(f"  Max Memory: {perf['max_memory_mb']:.1f} MB")
        report.append("")

        # Issues
        if self.analysis['issues']:
            report.append("ISSUES AND WARNINGS:")
            issue_counts = defaultdict(int)
            for issue in self.analysis['issues']:
                issue_counts[issue['type']] += 1

            for issue_type, count in sorted(issue_counts.items()):
                report.append(f"  {issue_type}: {count} occurrences")

            # Show the first few examples, truncated to 80 characters.
            report.append("  Examples:")
            for issue in self.analysis['issues'][:5]:
                msg = issue['message'][:80] + "..." if len(issue['message']) > 80 else issue['message']
                report.append(f"    [{issue['type']}] {msg}")

        report.append("")
        report.append("=" * 80)

        return '\n'.join(report)

    def _get_field_unit(self, field_name: str) -> str:
        """Return the display unit for a background field ('' if unknown)."""
        units = {
            'ps': 'Pa',
            'prse': 'Pa',
            'tv': 'K',
            'tsen': 'K',
            'q': 'kg/kg',
            'u': 'm/s',
            'v': 'm/s',
            'sst': 'K'
        }
        return units.get(field_name, '')

    def save_json(self, output_file: str):
        """Save the full analysis dict as pretty-printed JSON."""
        with open(output_file, 'w') as f:
            json.dump(self.analysis, f, indent=2)

    def run(self):
        """Read the file, run all passes, and return the text report."""
        self.read_file()
        self.analyze()
        return self.generate_report()


def main():
    """CLI entry point.

    Usage: analyze_gsi_stdout.py [<gsi_stdout_file>] [--json output.json]
    The stdout file defaults to "gsi_stdout" next to this script.
    """
    argv = sys.argv[1:]
    json_output = None

    # First positional argument (anything not starting with "--") is the
    # log file; otherwise fall back to the default beside this script.
    if argv and not argv[0].startswith('--'):
        target = argv.pop(0)
    else:
        here = os.path.dirname(os.path.abspath(__file__))
        target = os.path.join(here, 'gsi_stdout')

    # Optional "--json <path>" pair; a missing value is a usage error.
    if '--json' in argv:
        idx = argv.index('--json')
        if idx + 1 >= len(argv):
            print("Usage: python analyze_gsi_stdout.py [<gsi_stdout_file>] [--json output.json]")
            sys.exit(1)
        json_output = argv[idx + 1]

    analyzer = GSIAnalyzer(target)

    try:
        print(analyzer.run())

        if json_output:
            # Work on a deep copy so the in-memory analysis keeps its
            # warnings; only the JSON artifact drops them.
            payload = copy.deepcopy(analyzer.analysis)
            if 'issues' in payload:
                payload['issues'] = [
                    item for item in payload['issues']
                    if item.get('type') != 'warning'
                ]
            with open(json_output, 'w') as handle:
                json.dump(payload, handle, indent=2)
            print(f"\nJSON analysis saved to: {json_output}")

    except Exception as exc:
        print(f"Error analyzing file: {exc}", file=sys.stderr)
        sys.exit(1)


# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()