#!/usr/bin/env python3
"""
GSI/EnKF Documentation Generation Script

This script generates comprehensive documentation from GSI/EnKF Fortran source files
that have been processed with structured comments by the automated code commentary system.

This is a standalone utility that uses the fortran_commentator package and 
documentation_generator utility.

Usage:
    python generate_gsi_docs.py --source-dir /path/to/gsi/src --output-dir ./docs
    python generate_gsi_docs.py --gsi-classification ./gsi_classification --output-dir ./docs --formats html sphinx
    
Author: GSI Development Team
Date: 2025-01-12
"""

import argparse
import sys
from pathlib import Path
import logging

# Import the local documentation generator
from documentation_generator import DocumentationManager

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        # Persist the full run log to disk in the current working directory...
        logging.FileHandler('gsi_docs_generation.log'),
        # ...while mirroring everything to stdout for interactive runs.
        logging.StreamHandler(sys.stdout)
    ]
)
# Module-level logger shared by all functions in this script.
logger = logging.getLogger(__name__)


def discover_gsi_classification_files(classification_dir: Path) -> dict:
    """
    Discover GSI Fortran source files organized by classification.

    Args:
        classification_dir: Path to the gsi_classification directory, which
            may contain one subdirectory per known classification.

    Returns:
        Dictionary mapping each classification name to a sorted, de-duplicated
        list of Fortran source file Paths (empty list when the subdirectory
        is missing or holds no Fortran files).
    """
    # Case-folded Fortran suffixes; matching via Path.suffix.lower() covers
    # both free-form (.f90) and preprocessed (.F90) spellings in one pass.
    fortran_suffixes = {'.f90', '.f', '.f95', '.f03', '.f08'}

    classifications = {
        'background_grid': [],
        'core_analysis': [],
        'io_interface': [],
        'observation_processing': [],
        'utilities': []
    }

    for class_name in classifications:
        class_dir = classification_dir / class_name
        if not class_dir.exists():
            continue
        # A single recursive walk with a set avoids double-counting the same
        # file on case-insensitive filesystems, where separate '*.f90' and
        # '*.F90' globs would each match every .f90/.F90 file. Sorting keeps
        # the output deterministic across platforms.
        found = {
            path for path in class_dir.rglob('*')
            if path.is_file() and path.suffix.lower() in fortran_suffixes
        }
        classifications[class_name].extend(sorted(found))

    return classifications


def generate_classification_specific_docs(doc_manager: DocumentationManager,
                                        classifications: dict,
                                        output_dir: Path,
                                        formats: list) -> dict:
    """
    Generate one documentation set per GSI classification.

    Each non-empty classification is rendered into its own subdirectory of
    ``output_dir``; failures are recorded per classification rather than
    aborting the whole run.

    Args:
        doc_manager: Documentation manager instance
        classifications: Mapping of classification name -> list of files
        output_dir: Base output directory (one subdirectory created per class)
        formats: List of documentation formats to generate

    Returns:
        Mapping of classification name -> generation results, or an
        ``{'error': ...}`` entry for classifications that failed.
    """
    outcome = {}

    for category, category_files in classifications.items():
        # Empty classifications are skipped entirely (no output directory).
        if not category_files:
            logger.info(f"No files found for classification: {category}")
            continue

        logger.info(f"Generating documentation for {category} ({len(category_files)} files)")

        try:
            outcome[category] = doc_manager.generate_documentation(
                source_paths=category_files,
                output_dir=output_dir / category,
                formats=formats,
                analyze_relationships=True
            )
        except Exception as e:
            # Record the failure and keep processing remaining classifications.
            logger.error(f"Failed to generate {category} documentation: {e}")
            outcome[category] = {'error': str(e)}
        else:
            logger.info(f"Successfully generated {category} documentation")

    return outcome


def generate_combined_docs(doc_manager: DocumentationManager,
                          all_files: list,
                          output_dir: Path,
                          formats: list) -> dict:
    """
    Produce a single documentation set covering every GSI source file.

    Args:
        doc_manager: Documentation manager instance
        all_files: Every source file to include in the combined docs
        output_dir: Output directory for the combined documentation
        formats: List of documentation formats to generate

    Returns:
        Generation results on success, or ``{'error': ...}`` on failure.
    """
    logger.info(f"Generating combined documentation for {len(all_files)} files")

    try:
        combined = doc_manager.generate_documentation(
            source_paths=all_files,
            output_dir=output_dir,
            formats=formats,
            analyze_relationships=True
        )
    except Exception as e:
        # Surface the failure to the caller instead of raising.
        logger.error(f"Failed to generate combined documentation: {e}")
        return {'error': str(e)}

    logger.info("Successfully generated combined documentation")
    return combined


def print_generation_summary(results: dict, title: str) -> None:
    """Print a human-readable summary of documentation generation results.

    Args:
        results: Result dictionary from a ``generate_documentation()`` call,
            or an ``{'error': ...}`` dict recorded when generation failed.
        title: Heading printed above the summary (underlined to its length).
    """
    print(f"\n{title}")
    print("=" * len(title))

    # A top-level 'error' key means generation failed outright; nothing else
    # in the dict is meaningful in that case.
    if 'error' in results:
        print(f"❌ Generation failed: {results['error']}")
        return

    if 'parsed_files' in results:
        print(f"📄 Processed {results['parsed_files']} source files")

    # Per-format status lines (one per requested output format).
    for format_name, result in results.get('generation_results', {}).items():
        status = "✅" if result.get('success', False) else "❌"
        print(f"{status} {format_name.upper()}: {result.get('output_dir', 'N/A')}")

    # Relationship statistics are optional and may be partially populated;
    # use .get() chains so a missing nested key cannot crash the summary.
    relationship_data = results.get('relationship_data')
    if relationship_data:
        viz_stats = relationship_data.get('visualization_data', {}).get('statistics', {})
        print(f"🔗 Relationships: {viz_stats.get('total_nodes', 0)} nodes, "
              f"{viz_stats.get('total_edges', 0)} edges")


def main():
    """Parse command-line arguments and drive GSI documentation generation.

    Returns:
        int: 0 on success, 1 on any failure (missing input paths, no Fortran
        files discovered, or an exception during generation).
    """
    parser = argparse.ArgumentParser(
        description='Generate comprehensive documentation for GSI/EnKF codebase',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='''
Examples:
  # Generate from GSI source directory
  python generate_gsi_docs.py --source-dir /path/to/gsi/src --output-dir ./gsi_docs
  
  # Generate from classified GSI files  
  python generate_gsi_docs.py --gsi-classification ./gsi_classification --output-dir ./gsi_docs
  
  # Generate specific formats with separate classification docs
  python generate_gsi_docs.py --gsi-classification ./gsi_classification \\
                               --output-dir ./gsi_docs \\
                               --formats html sphinx \\
                               --separate-classifications
        '''
    )
    
    # Input options (mutually exclusive): raw source tree OR classified tree.
    input_group = parser.add_mutually_exclusive_group(required=True)
    input_group.add_argument('--source-dir',
                           help='Path to GSI source directory')
    input_group.add_argument('--gsi-classification', 
                           help='Path to gsi_classification directory with categorized files')
    
    # Output options
    parser.add_argument('--output-dir', required=True,
                       help='Output directory for generated documentation')
    parser.add_argument('--formats', nargs='+',
                       choices=['html', 'sphinx', 'doxygen'],
                       default=['html'],
                       help='Documentation formats to generate (default: html)')
    
    # Generation options
    parser.add_argument('--separate-classifications', action='store_true',
                       help='Generate separate documentation for each GSI classification')
    parser.add_argument('--project-name',
                       default='GSI/EnKF Documentation',
                       help='Project name for generated documentation')
    parser.add_argument('--build-sphinx', action='store_true',
                       help='Build HTML from generated Sphinx files')
    parser.add_argument('--build-doxygen', action='store_true',
                       help='Build HTML from generated Doxygen files')
    parser.add_argument('--verbose', action='store_true',
                       help='Enable verbose logging')
    
    args = parser.parse_args()
    
    # Configure logging level (root logger, so both handlers are affected).
    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)
    
    # Validate paths up front so failures happen before any heavy work.
    output_dir = Path(args.output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)
    
    source_path = None
    classification_path = None
    if args.source_dir:
        source_path = Path(args.source_dir)
        if not source_path.exists():
            logger.error(f"Source directory not found: {source_path}")
            return 1
    else:
        classification_path = Path(args.gsi_classification)
        if not classification_path.exists():
            logger.error(f"GSI classification directory not found: {classification_path}")
            return 1
    
    try:
        # Initialize documentation manager
        logger.info(f"Initializing documentation manager for: {args.project_name}")
        doc_manager = DocumentationManager(args.project_name)
        
        if source_path is not None:
            # Generate from a single source directory (no classification split).
            logger.info(f"Processing source directory: {args.source_dir}")
            
            results = doc_manager.generate_documentation(
                source_paths=[source_path],
                output_dir=output_dir / 'combined',
                formats=args.formats,
                analyze_relationships=True
            )
            
            print_generation_summary(results, "Combined GSI Documentation")
            
        else:
            # Process GSI classification directory
            logger.info(f"Processing GSI classification directory: {args.gsi_classification}")
            
            # Discover files by classification
            classifications = discover_gsi_classification_files(classification_path)
            
            # Print file counts by classification
            print("\nGSI Files by Classification:")
            total_files = 0
            for class_name, files in classifications.items():
                print(f"  {class_name}: {len(files)} files")
                total_files += len(files)
            print(f"  Total: {total_files} files")
            
            if total_files == 0:
                logger.error("No Fortran files found in classification directories")
                return 1
            
            # Optionally generate a separate documentation set per classification.
            if args.separate_classifications:
                classification_results = generate_classification_specific_docs(
                    doc_manager, classifications, output_dir, args.formats
                )
                
                for class_name, results in classification_results.items():
                    print_generation_summary(results, f"{class_name.replace('_', ' ').title()} Documentation")
            
            # Always generate combined documentation across all classifications.
            all_files = []
            for files in classifications.values():
                all_files.extend(files)
            
            if all_files:
                combined_results = generate_combined_docs(
                    doc_manager, all_files, output_dir / 'combined', args.formats
                )
                print_generation_summary(combined_results, "Combined GSI Documentation")
        
        # Build documentation if requested (imports are deferred so the
        # builders are only loaded when actually needed).
        if args.build_sphinx and 'sphinx' in args.formats:
            logger.info("Building Sphinx documentation...")
            from documentation_generator import build_sphinx_docs
            sphinx_dirs = list(output_dir.rglob('sphinx'))
            for sphinx_dir in sphinx_dirs:
                build_sphinx_docs(sphinx_dir)
        
        if args.build_doxygen and 'doxygen' in args.formats:
            logger.info("Building Doxygen documentation...")
            from documentation_generator import build_doxygen_docs
            doxygen_dirs = list(output_dir.rglob('doxygen'))
            for doxygen_dir in doxygen_dirs:
                build_doxygen_docs(doxygen_dir)
        
        print(f"\n✅ Documentation generation completed!")
        print(f"📁 Output directory: {output_dir}")
        print(f"📋 Log file: gsi_docs_generation.log")
        
        return 0
        
    except Exception as e:
        logger.error(f"Documentation generation failed: {e}")
        import traceback
        traceback.print_exc()
        return 1


if __name__ == '__main__':
    # Use sys.exit rather than the interactive-only exit() builtin (which is
    # installed by the site module and may be absent, e.g. under `python -S`),
    # so the return code reliably propagates to the shell.
    sys.exit(main())