"""
File upload and processing router with async support
"""
import os
import uuid
import time
import asyncio
from pathlib import Path
from typing import Optional, List, Dict, Any
import logging
import aiofiles
from fastapi import APIRouter, UploadFile, File, Form, HTTPException, BackgroundTasks
from fastapi.responses import JSONResponse
import httpx

from ..parsers import get_parser, FileType
from ..utils.file_detector import file_detector
from ..parsers.base import ParseResult
from ..utils.cache_manager import get_cache_manager
from ..utils.file_cleanup import get_file_cleanup_manager

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/upload", tags=["File Upload & Processing"])


class FileProcessor:
    """
    Async file processor for handling multiple file formats.

    Responsibilities:
      - persist uploaded files / URL downloads under ``upload_dir/temp``
      - dispatch files to the appropriate parser (with optional Redis caching)
      - track per-file processing status in an in-memory dict
      - clean up temporary files after a delay
    """

    def __init__(self, upload_dir: str = "uploads"):
        """Create the upload directory tree and the in-memory status map."""
        self.upload_dir = Path(upload_dir)
        self.upload_dir.mkdir(exist_ok=True)

        # Subdirectories for incoming and finished files
        (self.upload_dir / "temp").mkdir(exist_ok=True)
        (self.upload_dir / "processed").mkdir(exist_ok=True)

        # file_id -> {"status", "started_at", "file_path", ...}. In-memory
        # only: lost on restart and not shared across worker processes.
        self.processing_status: Dict[str, Dict[str, Any]] = {}

    async def save_uploaded_file(self, upload_file: UploadFile) -> tuple[str, Path]:
        """
        Save an uploaded file to disk asynchronously.

        Returns:
            Tuple of (file_id, file_path)
        """
        # Generate unique file ID
        file_id = str(uuid.uuid4())

        # Path(...).name strips any directory components from the
        # client-supplied filename, so a crafted name (e.g. "../../x")
        # cannot escape the temp directory.
        safe_filename = f"{file_id}_{Path(upload_file.filename).name}"
        file_path = self.upload_dir / "temp" / safe_filename

        # NOTE: reads the whole upload into memory; fine for modest files,
        # consider a chunked copy for very large uploads.
        async with aiofiles.open(file_path, 'wb') as f:
            content = await upload_file.read()
            await f.write(content)

        logger.info(f"Saved uploaded file: {safe_filename}")
        return file_id, file_path

    async def download_from_url(self, url: str) -> tuple[str, Path]:
        """
        Download a file from a URL asynchronously.

        Returns:
            Tuple of (file_id, file_path)

        Raises:
            httpx.HTTPError: on network failure or a non-2xx response.
        """
        file_id = str(uuid.uuid4())

        async with httpx.AsyncClient() as client:
            response = await client.get(url)
            response.raise_for_status()

            # Prefer the filename advertised in Content-Disposition, if any
            filename = None
            if 'content-disposition' in response.headers:
                import re
                cd = response.headers['content-disposition']
                match = re.search(r'filename="?([^"]+)"?', cd)
                if match:
                    filename = match.group(1)

            # Fall back to the last URL path segment (query string stripped)
            if not filename:
                filename = url.split('/')[-1].split('?')[0] or f"download_{file_id}"

            # Bug fix: the downloaded file previously got a literal
            # "(unknown)" suffix — the detected filename was computed and
            # then discarded. Use the sanitized name so the extension
            # survives for file-type detection.
            safe_filename = f"{file_id}_{Path(filename).name}"
            file_path = self.upload_dir / "temp" / safe_filename

            # Save file
            async with aiofiles.open(file_path, 'wb') as f:
                await f.write(response.content)

        logger.info(f"Downloaded file from URL: {safe_filename}")
        return file_id, file_path

    async def process_file(self, file_id: str, file_path: Path,
                           parser_config: Optional[Dict[str, Any]] = None) -> ParseResult:
        """
        Detect the file type, parse the file and return the result.

        Results are cached in Redis (when available) keyed on the file and
        the parser configuration; ``self.processing_status[file_id]`` is
        updated throughout so callers can poll progress.

        Raises:
            ValueError: if the file type cannot be determined.
        """
        cache_manager = await get_cache_manager()

        try:
            # Mark as in-flight before any potentially slow work
            self.processing_status[file_id] = {
                "status": "processing",
                "started_at": time.time(),
                "file_path": str(file_path)
            }

            # Serve from cache when possible
            if cache_manager and cache_manager.redis_client:
                cached_result = await cache_manager.get_cached_result(file_path, parser_config)
                if cached_result:
                    self.processing_status[file_id].update({
                        "status": "completed" if cached_result.success else "failed",
                        "completed_at": time.time(),
                        "file_type": cached_result.file_type.value if hasattr(cached_result, 'file_type') else "unknown",
                        "result": cached_result.to_dict(),
                        "from_cache": True
                    })
                    logger.info(f"Returning cached result for file {file_id}")
                    return cached_result

            # Detect file type
            file_type = file_detector.detect_file_type(file_path)
            if file_type == FileType.UNKNOWN:
                raise ValueError("Unsupported file type")

            result = self._invoke_parser(file_type, file_path, parser_config)

            # Async parsers return a coroutine from parse(); await it here so
            # both sync and async parser implementations are supported.
            if asyncio.iscoroutine(result):
                result = await result

            # Cache successful results for subsequent identical requests
            if cache_manager and cache_manager.redis_client and result.success:
                await cache_manager.cache_result(file_path, result, parser_config)

            # Record the final outcome
            self.processing_status[file_id].update({
                "status": "completed" if result.success else "failed",
                "completed_at": time.time(),
                "file_type": file_type.value,
                "result": result.to_dict(),
                "from_cache": False
            })

            return result

        except Exception as e:
            # Record the failure so status polling reflects reality
            self.processing_status[file_id].update({
                "status": "failed",
                "completed_at": time.time(),
                "error": str(e)
            })

            logger.error(f"Error processing file {file_id}: {e}")
            raise

    def _invoke_parser(self, file_type: FileType, file_path: Path,
                       parser_config: Optional[Dict[str, Any]]):
        """Select a parser for *file_type* and call its parse() method.

        May return either a ParseResult or a coroutine (for async parsers);
        the caller is responsible for awaiting coroutines.
        """
        # Plain text goes straight to TextParser: get_parser() previously
        # misbehaved for this type (see the original "coroutine issue"
        # workaround) — keep the direct path until that is resolved upstream.
        if file_type == FileType.PLAIN_TEXT:
            from ..parsers.text_parser import TextParser
            return TextParser().parse(file_path)

        parser = get_parser(file_type)

        # Forward the config only to parsers whose parse() accepts extra
        # parameters beyond file_path.
        if parser_config and hasattr(parser, 'parse'):
            import inspect
            if len(inspect.signature(parser.parse).parameters) > 1:
                return parser.parse(file_path, **parser_config)
        return parser.parse(file_path)

    def get_processing_status(self, file_id: str) -> Optional[Dict[str, Any]]:
        """Return the status dict for *file_id*, or None if unknown."""
        return self.processing_status.get(file_id)

    async def cleanup_temp_file(self, file_path: Path, delay: int = 300):
        """
        Delete *file_path* after *delay* seconds (default 5 minutes) and
        invalidate any cached parse result for it. Errors are logged, never
        raised — cleanup is best-effort.
        """
        await asyncio.sleep(delay)

        # Use file cleanup manager if available
        cleanup_manager = get_file_cleanup_manager()

        try:
            if file_path.exists():
                # Capture the size now — after unlink() stat() is impossible.
                # (The original read st_size after deletion, so "bytes_freed"
                # was never actually incremented.)
                file_size = file_path.stat().st_size

                # Drop any cached result tied to this path
                cache_manager = await get_cache_manager()
                if cache_manager and cache_manager.redis_client:
                    await cache_manager.invalidate_cache(file_path)

                # Delete the file
                file_path.unlink()
                logger.info(f"Cleaned up temporary file: {file_path}")

                # Update cleanup stats if manager is available
                if cleanup_manager:
                    cleanup_manager.stats["files_deleted"] += 1
                    cleanup_manager.stats["bytes_freed"] += file_size

        except Exception as e:
            logger.error(f"Error cleaning up file {file_path}: {e}")


# Global file processor instance: module-level singleton shared by every
# route below. Note it creates the uploads/ directory tree at import time.
file_processor = FileProcessor()


@router.post("/file")
async def upload_file(
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    process_immediately: bool = Form(True),
    language: str = Form("eng"),
    preprocessing: bool = Form(True)
):
    """
    Upload and optionally process a file
    
    Args:
        file: The file to upload
        process_immediately: Whether to process file immediately or return file_id for later processing
        language: OCR language for image files (default: eng)
        preprocessing: Whether to apply preprocessing for image files
    """
    try:
        # Validate file size
        if not file.filename:
            raise HTTPException(status_code=400, detail="No file provided")
        
        # Save uploaded file
        file_id, file_path = await file_processor.save_uploaded_file(file)
        
        # Schedule cleanup
        background_tasks.add_task(file_processor.cleanup_temp_file, file_path)
        
        if process_immediately:
            # Process file immediately
            try:
                # Prepare parser configuration
                parser_config = {
                    "language": language,
                    "preprocessing": preprocessing
                }
                
                result = await file_processor.process_file(file_id, file_path, parser_config)
                
                return {
                    "file_id": file_id,
                    "filename": file.filename,
                    "processing_status": "completed",
                    "result": result.to_dict()
                }
                
            except Exception as e:
                raise HTTPException(status_code=500, detail=f"Processing failed: {str(e)}")
        
        else:
            # Return file_id for async processing
            return {
                "file_id": file_id,
                "filename": file.filename,
                "processing_status": "uploaded",
                "message": f"File uploaded successfully. Use /upload/process/{file_id} to process."
            }
    
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Upload error: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/url")
async def upload_from_url(
    background_tasks: BackgroundTasks,
    url: str = Form(...),
    process_immediately: bool = Form(True),
    language: str = Form("eng"),
    preprocessing: bool = Form(True)
):
    """
    Download file from URL and optionally process it
    
    Args:
        url: URL to download file from
        process_immediately: Whether to process file immediately
        language: OCR language for image files
        preprocessing: Whether to apply preprocessing for image files
    """
    try:
        # Download file
        file_id, file_path = await file_processor.download_from_url(url)
        
        # Schedule cleanup
        background_tasks.add_task(file_processor.cleanup_temp_file, file_path)
        
        if process_immediately:
            # Process file immediately
            try:
                # Prepare parser configuration
                parser_config = {
                    "language": language,
                    "preprocessing": preprocessing
                }
                
                result = await file_processor.process_file(file_id, file_path, parser_config)
                
                return {
                    "file_id": file_id,
                    "url": url,
                    "processing_status": "completed",
                    "result": result.to_dict()
                }
                
            except Exception as e:
                raise HTTPException(status_code=500, detail=f"Processing failed: {str(e)}")
        
        else:
            # Return file_id for async processing
            return {
                "file_id": file_id,
                "url": url,
                "processing_status": "downloaded",
                "message": f"File downloaded successfully. Use /upload/process/{file_id} to process."
            }
    
    except httpx.HTTPError as e:
        raise HTTPException(status_code=400, detail=f"Download failed: {str(e)}")
    except Exception as e:
        logger.error(f"URL upload error: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/process/{file_id}")
async def process_file_async(
    file_id: str,
    background_tasks: BackgroundTasks,
    language: str = Form("eng"),
    preprocessing: bool = Form(True)
):
    """
    Process a previously uploaded file asynchronously
    
    Args:
        file_id: File ID from previous upload
        language: OCR language for image files
        preprocessing: Whether to apply preprocessing for image files
    """
    # Check if file exists in processing status
    status = file_processor.get_processing_status(file_id)
    
    if not status:
        raise HTTPException(status_code=404, detail="File not found")
    
    if status["status"] == "processing":
        return {"file_id": file_id, "status": "already_processing"}
    
    if status["status"] == "completed":
        return {
            "file_id": file_id,
            "status": "completed",
            "result": status["result"]
        }
    
    # Start async processing
    file_path = Path(status["file_path"])
    if not file_path.exists():
        raise HTTPException(status_code=404, detail="File no longer exists")
    
    # Process in background
    background_tasks.add_task(file_processor.process_file, file_id, file_path)
    
    return {
        "file_id": file_id,
        "status": "processing_started",
        "message": f"Processing started. Check status with /upload/status/{file_id}"
    }


@router.get("/status/{file_id}")
async def get_processing_status(file_id: str):
    """
    Get processing status for a file
    """
    status = file_processor.get_processing_status(file_id)
    
    if not status:
        raise HTTPException(status_code=404, detail="File not found")
    
    # Calculate processing time if completed
    if "completed_at" in status and "started_at" in status:
        status["processing_time"] = status["completed_at"] - status["started_at"]
    
    return {"file_id": file_id, **status}


@router.post("/batch")
async def upload_batch_files(
    background_tasks: BackgroundTasks,
    files: List[UploadFile] = File(...),
    process_immediately: bool = Form(True)
):
    """
    Upload multiple files for batch processing
    """
    if len(files) > 10:  # Limit batch size
        raise HTTPException(status_code=400, detail="Maximum 10 files allowed in batch")
    
    results = []
    
    for file in files:
        try:
            # Save uploaded file
            file_id, file_path = await file_processor.save_uploaded_file(file)
            
            # Schedule cleanup
            background_tasks.add_task(file_processor.cleanup_temp_file, file_path)
            
            file_result = {
                "file_id": file_id,
                "filename": file.filename,
                "status": "uploaded"
            }
            
            if process_immediately:
                try:
                    result = await file_processor.process_file(file_id, file_path)
                    file_result.update({
                        "status": "completed",
                        "result": result.to_dict()
                    })
                except Exception as e:
                    file_result.update({
                        "status": "failed",
                        "error": str(e)
                    })
            
            results.append(file_result)
            
        except Exception as e:
            results.append({
                "filename": file.filename,
                "status": "failed",
                "error": str(e)
            })
    
    return {
        "batch_id": str(uuid.uuid4()),
        "total_files": len(files),
        "results": results
    }


@router.get("/supported-formats")
async def get_supported_formats():
    """
    Get list of supported file formats
    """
    return {
        "supported_types": [ft.value for ft in FileType if ft != FileType.UNKNOWN],
        "supported_extensions": file_detector.get_supported_extensions(),
        "supported_mime_types": file_detector.get_supported_mime_types()
    }


@router.delete("/cleanup/{file_id}")
async def cleanup_file(file_id: str):
    """
    Manually cleanup a processed file
    """
    status = file_processor.get_processing_status(file_id)
    
    if not status:
        raise HTTPException(status_code=404, detail="File not found")
    
    file_path = Path(status["file_path"])
    
    try:
        if file_path.exists():
            file_path.unlink()
            
        # Remove from processing status
        if file_id in file_processor.processing_status:
            del file_processor.processing_status[file_id]
            
        return {"message": f"File {file_id} cleaned up successfully"}
        
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Cleanup failed: {str(e)}")


@router.get("/cache/stats")
async def get_cache_statistics():
    """
    Get cache statistics and information
    """
    cache_manager = await get_cache_manager()
    if not cache_manager or not cache_manager.redis_client:
        return {"error": "Cache not available"}
    
    return await cache_manager.get_cache_stats()


@router.post("/cache/clear")
async def clear_cache():
    """
    Clear all cached results
    """
    cache_manager = await get_cache_manager()
    if not cache_manager or not cache_manager.redis_client:
        raise HTTPException(status_code=503, detail="Cache not available")
    
    success = await cache_manager.clear_cache()
    if success:
        return {"message": "Cache cleared successfully"}
    else:
        raise HTTPException(status_code=500, detail="Failed to clear cache")


@router.delete("/cache/invalidate")
async def invalidate_file_cache(file_path: str):
    """
    Invalidate cache for a specific file
    """
    cache_manager = await get_cache_manager()
    if not cache_manager or not cache_manager.redis_client:
        raise HTTPException(status_code=503, detail="Cache not available")
    
    from pathlib import Path
    path = Path(file_path)
    
    success = await cache_manager.invalidate_cache(path)
    if success:
        return {"message": f"Cache invalidated for {file_path}"}
    else:
        raise HTTPException(status_code=500, detail="Failed to invalidate cache")


@router.get("/storage/info")
async def get_storage_info():
    """
    Get storage usage information
    """
    cleanup_manager = get_file_cleanup_manager()
    if not cleanup_manager:
        return {"error": "File cleanup manager not available"}
    
    return await cleanup_manager.get_storage_info()


@router.post("/storage/cleanup")
async def manual_cleanup(max_age_hours: Optional[int] = None):
    """
    Manually trigger file cleanup
    
    Args:
        max_age_hours: Custom age limit in hours (optional)
    """
    cleanup_manager = get_file_cleanup_manager()
    if not cleanup_manager:
        raise HTTPException(status_code=503, detail="File cleanup manager not available")
    
    result = await cleanup_manager.manual_cleanup(max_age_hours)
    return result


@router.post("/storage/enforce-quota")
async def enforce_storage_quota():
    """
    Manually enforce storage quota
    """
    cleanup_manager = get_file_cleanup_manager()
    if not cleanup_manager:
        raise HTTPException(status_code=503, detail="File cleanup manager not available")
    
    result = await cleanup_manager.enforce_storage_quota()
    return result


# Export file processor for use by other modules: the public API is the
# APIRouter, the shared processor singleton, and the class itself.
__all__ = ["router", "file_processor", "FileProcessor"]