import asyncio
import io
import logging
import mimetypes
import os
import uuid
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Optional, Tuple

import aiofiles
import boto3
from minio import Minio
from minio.error import S3Error

from app.core.config import settings

# Module-level logger named after this module (standard logging practice).
logger = logging.getLogger(__name__)


class StorageService:
    """Service for handling file storage across different backends.

    The backend is selected by ``settings.storage_type``:

    * ``"local"`` -- files under ``<model_path parent>/uploads``
    * ``"s3"``    -- AWS S3 via boto3
    * ``"minio"`` -- a MinIO server via the minio SDK

    ``save_file`` returns a backend-specific path/URI string; the same
    string is accepted back by ``get_file`` and ``delete_file``.
    """

    def __init__(self):
        self.storage_type = settings.storage_type
        self._init_storage()

    def _init_storage(self):
        """Initialize the configured storage backend.

        Raises:
            ValueError: If ``storage_type`` is not a supported backend.
                (Fail fast here instead of leaving a half-initialized
                service that only errors on first use.)
        """
        if self.storage_type == "s3":
            self.s3_client = boto3.client(
                's3',
                aws_access_key_id=settings.aws_access_key_id,
                aws_secret_access_key=settings.aws_secret_access_key,
                region_name=settings.aws_region
            )
            self.bucket_name = settings.s3_bucket_name

        elif self.storage_type == "minio":
            self.minio_client = Minio(
                settings.minio_endpoint,
                access_key=settings.minio_access_key,
                secret_key=settings.minio_secret_key,
                secure=False  # Set to True for HTTPS
            )
            self.bucket_name = settings.s3_bucket_name
            self._ensure_minio_bucket()

        elif self.storage_type == "local":
            self.local_storage_path = Path(settings.model_path).parent / "uploads"
            self.local_storage_path.mkdir(parents=True, exist_ok=True)

        else:
            raise ValueError(f"Unsupported storage type: {self.storage_type}")

    def _ensure_minio_bucket(self):
        """Create the MinIO bucket if it does not already exist."""
        try:
            if not self.minio_client.bucket_exists(self.bucket_name):
                self.minio_client.make_bucket(self.bucket_name)
                logger.info(f"Created MinIO bucket: {self.bucket_name}")
        except S3Error as e:
            logger.error(f"Failed to create MinIO bucket: {e}")
            raise

    @staticmethod
    def _guess_content_type(filename: str) -> str:
        """Best-effort MIME type from the file extension.

        Falls back to ``image/png`` (the previous hard-coded default)
        when the extension is unknown.
        """
        return mimetypes.guess_type(filename)[0] or 'image/png'

    def _generate_filename(self, original_filename: str, prefix: str = "") -> str:
        """Generate a unique filename, preserving the original extension.

        Combines an optional prefix, a UTC timestamp and a UUID4 so
        concurrent uploads cannot collide.
        """
        ext = Path(original_filename).suffix
        unique_id = str(uuid.uuid4())
        # Timezone-aware replacement for the deprecated datetime.utcnow();
        # produces the identical "%Y%m%d_%H%M%S" string.
        timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")

        if prefix:
            return f"{prefix}_{timestamp}_{unique_id}{ext}"
        return f"{timestamp}_{unique_id}{ext}"

    async def save_file(
        self,
        file_content: bytes,
        original_filename: str,
        prefix: str = ""
    ) -> str:
        """Save a file to storage and return its backend-specific path/URI.

        Raises:
            ValueError: If the configured storage type is unsupported.
        """
        filename = self._generate_filename(original_filename, prefix)

        if self.storage_type == "local":
            return await self._save_local(file_content, filename)
        elif self.storage_type == "s3":
            return await self._save_s3(file_content, filename)
        elif self.storage_type == "minio":
            return await self._save_minio(file_content, filename)
        else:
            raise ValueError(f"Unsupported storage type: {self.storage_type}")

    async def _save_local(self, file_content: bytes, filename: str) -> str:
        """Save file to local storage; returns the absolute path as a string."""
        file_path = self.local_storage_path / filename

        async with aiofiles.open(file_path, 'wb') as f:
            await f.write(file_content)

        return str(file_path)

    async def _save_s3(self, file_content: bytes, filename: str) -> str:
        """Upload to S3 in a worker thread; returns an ``s3://bucket/key`` URI."""
        def _upload():
            self.s3_client.put_object(
                Bucket=self.bucket_name,
                Key=filename,
                Body=file_content,
                ContentType=self._guess_content_type(filename)
            )
            # The key must be embedded in the URI: get_file/delete_file
            # strip the "s3://bucket/" prefix to recover it.
            return f"s3://{self.bucket_name}/{filename}"

        # boto3 is synchronous; run the upload in the default thread pool
        # so the event loop is not blocked.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _upload)

    async def _save_minio(self, file_content: bytes, filename: str) -> str:
        """Upload to MinIO in a worker thread; returns a ``minio://`` URI."""
        def _upload():
            self.minio_client.put_object(
                self.bucket_name,
                filename,
                io.BytesIO(file_content),
                len(file_content),
                content_type=self._guess_content_type(filename)
            )
            # The key must be embedded in the URI: get_file/delete_file
            # split on the "minio://endpoint/bucket/" prefix to recover it.
            return f"minio://{settings.minio_endpoint}/{self.bucket_name}/{filename}"

        # The minio SDK is synchronous; keep the event loop free.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _upload)

    async def get_file(self, file_path: str) -> Optional[bytes]:
        """Retrieve a file previously saved by :meth:`save_file`.

        Returns the raw bytes, or ``None`` if the file cannot be read.

        Raises:
            ValueError: If the configured storage type is unsupported.
        """
        if self.storage_type == "local":
            return await self._get_local(file_path)
        elif self.storage_type == "s3":
            return await self._get_s3(file_path)
        elif self.storage_type == "minio":
            return await self._get_minio(file_path)
        else:
            raise ValueError(f"Unsupported storage type: {self.storage_type}")

    async def _get_local(self, file_path: str) -> Optional[bytes]:
        """Get file from local storage; ``None`` if it does not exist."""
        try:
            async with aiofiles.open(file_path, 'rb') as f:
                return await f.read()
        except FileNotFoundError:
            logger.warning(f"File not found: {file_path}")
            return None

    async def _get_s3(self, file_path: str) -> Optional[bytes]:
        """Get file from S3; ``None`` on any download failure (logged)."""
        def _download():
            try:
                key = file_path.replace(f"s3://{self.bucket_name}/", "")
                response = self.s3_client.get_object(
                    Bucket=self.bucket_name,
                    Key=key
                )
                return response['Body'].read()
            except Exception as e:
                logger.error(f"Failed to download from S3: {e}")
                return None

        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _download)

    async def _get_minio(self, file_path: str) -> Optional[bytes]:
        """Get file from MinIO; ``None`` on any download failure (logged)."""
        def _download():
            response = None
            try:
                key = file_path.split(f"minio://{settings.minio_endpoint}/{self.bucket_name}/")[1]
                response = self.minio_client.get_object(self.bucket_name, key)
                return response.read()
            except Exception as e:
                logger.error(f"Failed to download from MinIO: {e}")
                return None
            finally:
                # The minio SDK requires the response to be closed and the
                # connection released back to the pool after use.
                if response is not None:
                    response.close()
                    response.release_conn()

        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _download)

    async def delete_file(self, file_path: str) -> bool:
        """Delete a file from storage.

        Returns ``True`` on success (including a path that does not
        exist locally), ``False`` if the backend raised (logged).
        """
        try:
            if self.storage_type == "local":
                Path(file_path).unlink(missing_ok=True)
            elif self.storage_type == "s3":
                key = file_path.replace(f"s3://{self.bucket_name}/", "")
                self.s3_client.delete_object(Bucket=self.bucket_name, Key=key)
            elif self.storage_type == "minio":
                key = file_path.split(f"minio://{settings.minio_endpoint}/{self.bucket_name}/")[1]
                self.minio_client.remove_object(self.bucket_name, key)

            return True
        except Exception as e:
            logger.error(f"Failed to delete file: {e}")
            return False

    def get_file_url(self, filename: str) -> str:
        """Get the public URL for a stored filename.

        Raises:
            ValueError: If the configured storage type is unsupported.
        """
        if self.storage_type == "local":
            return f"/static/uploads/{filename}"
        elif self.storage_type == "s3":
            return f"https://{self.bucket_name}.s3.amazonaws.com/{filename}"
        elif self.storage_type == "minio":
            return f"http://{settings.minio_endpoint}/{self.bucket_name}/{filename}"
        else:
            raise ValueError(f"Unsupported storage type: {self.storage_type}")


# Global storage instance (module-level singleton shared by importers).
# NOTE(review): instantiated eagerly at import time, so backend setup --
# including the network call in _ensure_minio_bucket for the "minio"
# backend -- runs on import; a misconfigured backend fails the import
# of this module rather than the first request.
storage_service = StorageService()
