"""API endpoints for cleaning configuration management"""

import copy
import json
import logging
import tempfile
from pathlib import Path
from typing import Any, Dict, Optional

import yaml
from fastapi import APIRouter, HTTPException, status
from pydantic import BaseModel

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/v1/crawler/cleaning", tags=["cleaning"])

# Default config path
CONFIG_PATH = Path(__file__).parent.parent / 'config' / 'cleaning_config.yaml'

# Global cleaning pipeline instance (for preview)
_preview_pipeline = None


class CleaningConfig(BaseModel):
    """Cleaning configuration model.

    Mirrors the structure of ``cleaning_config.yaml``; used both as the
    request/response schema for the config endpoints and as the payload
    dumped back to disk on update.
    """
    # Schema version of the configuration file
    version: str = "1.0"
    # Pipeline-level settings (structure defined by the cleaning module)
    pipeline: Dict[str, Any]
    # Ordered list of cleaner definitions; must be non-empty on update
    # (enforced in update_cleaning_config, not here)
    cleaners: list
    # Optional sections; omitted entirely when not present in the YAML
    logging: Optional[Dict[str, Any]] = None
    performance: Optional[Dict[str, Any]] = None
    output: Optional[Dict[str, Any]] = None


class CleaningPreviewRequest(BaseModel):
    """Request model for cleaning preview.

    Carries one sample record and, optionally, an ad-hoc configuration to
    preview with instead of the saved one.
    """
    # Sample record to run through the cleaning pipeline
    data: Dict[str, Any]
    # If provided, a one-off config used just for this preview
    config: Optional[CleaningConfig] = None


class CleaningPreviewResponse(BaseModel):
    """Response model for cleaning preview.

    Pairs the untouched input with the pipeline output so callers can
    diff the two.
    """
    # Copy of the input record, taken before cleaning
    original: Dict[str, Any]
    # Record after the cleaning pipeline has processed it
    cleaned: Dict[str, Any]
    # Pipeline statistics, as returned by CleaningPipeline.get_stats()
    stats: Optional[Dict[str, Any]] = None


@router.get("/config", response_model=CleaningConfig)
async def get_cleaning_config():
    """Get current cleaning configuration.

    Returns:
        Current cleaning configuration loaded from CONFIG_PATH.

    Raises:
        HTTPException: 404 if the config file does not exist,
            500 if it cannot be read or parsed.
    """
    try:
        if not CONFIG_PATH.exists():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Configuration file not found at {CONFIG_PATH}"
            )
        with open(CONFIG_PATH, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)
        # safe_load returns None for an empty file; fail clearly instead of
        # crashing on CleaningConfig(**None)
        if not isinstance(config, dict):
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Configuration file is empty or malformed"
            )
        return CleaningConfig(**config)
    except HTTPException:
        # Re-raise intentional HTTP errors unchanged; previously the broad
        # handler below turned the 404 into a 500.
        raise
    except Exception as e:
        logger.error(f"Failed to load cleaning config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )


@router.put("/config", response_model=CleaningConfig)
async def update_cleaning_config(config: CleaningConfig):
    """Update cleaning configuration.

    Persists the configuration to CONFIG_PATH as YAML and invalidates the
    cached preview pipeline so the next preview reloads the new config.

    Args:
        config: New cleaning configuration.

    Returns:
        The configuration that was saved.

    Raises:
        HTTPException: 400 if no cleaners are configured,
            500 if the file cannot be written.
    """
    try:
        # Validate configuration structure
        if not config.cleaners:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Configuration must include at least one cleaner"
            )

        # Save configuration (create the config directory if needed)
        CONFIG_PATH.parent.mkdir(parents=True, exist_ok=True)

        with open(CONFIG_PATH, 'w', encoding='utf-8') as f:
            yaml.dump(config.dict(), f, allow_unicode=True)

        logger.info("Cleaning configuration updated successfully")

        # Reset preview pipeline so it reloads the updated config lazily
        global _preview_pipeline
        _preview_pipeline = None

        return config

    except HTTPException:
        # Re-raise intentional HTTP errors unchanged; previously the broad
        # handler below turned the 400 into a 500.
        raise
    except Exception as e:
        logger.error(f"Failed to update cleaning config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )


@router.post("/preview", response_model=CleaningPreviewResponse)
async def preview_cleaning(request: CleaningPreviewRequest):
    """Preview cleaning effect on sample data.

    Args:
        request: Preview request with data and optional config. When a
            config is supplied it replaces the cached preview pipeline;
            otherwise the pipeline is lazily built from CONFIG_PATH.

    Returns:
        Original and cleaned data comparison plus pipeline statistics.

    Raises:
        HTTPException: 501 if the cleaning modules are not installed,
            500 on any other processing error.
    """
    try:
        global _preview_pipeline

        # Create or update preview pipeline
        if request.config:
            # Use the provided config via a private temp file instead of a
            # fixed world-readable /tmp path (predictable-path risk, and the
            # old file was never cleaned up).
            from ..cleaning.pipeline import CleaningPipeline

            with tempfile.NamedTemporaryFile(
                mode='w', suffix='.yaml', delete=False, encoding='utf-8'
            ) as tmp:
                yaml.dump(request.config.dict(), tmp)
                temp_path = tmp.name
            try:
                _preview_pipeline = CleaningPipeline(temp_path)
            finally:
                # Pipeline reads the config at construction time; remove the
                # temp file regardless of success.
                Path(temp_path).unlink(missing_ok=True)
        elif _preview_pipeline is None:
            # Load default config lazily on first use
            from ..cleaning.pipeline import CleaningPipeline
            _preview_pipeline = CleaningPipeline(str(CONFIG_PATH))

        # Deep-copy so nested structures in the "original" snapshot cannot be
        # mutated by the pipeline (a shallow .copy() shared nested objects).
        original = copy.deepcopy(request.data)
        cleaned = _preview_pipeline.process(request.data)

        # Get statistics
        stats = _preview_pipeline.get_stats()

        return CleaningPreviewResponse(
            original=original,
            cleaned=cleaned,
            stats=stats
        )

    except ImportError as e:
        logger.error(f"Cleaning modules not available: {e}")
        raise HTTPException(
            status_code=status.HTTP_501_NOT_IMPLEMENTED,
            detail="Cleaning modules are not installed"
        )
    except Exception as e:
        logger.error(f"Failed to preview cleaning: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )


@router.get("/stats")
async def get_cleaning_stats():
    """Get cleaning statistics.

    Lazily builds the preview pipeline from CONFIG_PATH if one is not
    already cached, then returns its statistics augmented with config
    file info.

    Returns:
        Cleaning pipeline statistics dict with 'config_path' and
        'config_exists' keys added.

    Raises:
        HTTPException: 501 if the cleaning modules are not installed
            (consistent with the preview endpoint), 500 on other errors.
    """
    try:
        global _preview_pipeline

        if _preview_pipeline is None:
            from ..cleaning.pipeline import CleaningPipeline
            _preview_pipeline = CleaningPipeline(str(CONFIG_PATH))

        stats = _preview_pipeline.get_stats()

        # Add configuration info
        stats['config_path'] = str(CONFIG_PATH)
        stats['config_exists'] = CONFIG_PATH.exists()

        return stats

    except ImportError as e:
        # Match preview_cleaning: a missing cleaning package is 501, not a
        # generic 500.
        logger.error(f"Cleaning modules not available: {e}")
        raise HTTPException(
            status_code=status.HTTP_501_NOT_IMPLEMENTED,
            detail="Cleaning modules are not installed"
        )
    except Exception as e:
        logger.error(f"Failed to get cleaning stats: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )