import asyncio
import io
import os
from concurrent.futures import ThreadPoolExecutor
from typing import List

from loguru import logger
from minio import Minio
from minio.deleteobjects import DeleteObject

from ..chunk_io import ChunkIO


def file_name_for_param(index: int) -> str:
    """Build the canonical object name for the parameter file at *index*.

    Args:
        index: Zero-based parameter index.

    Returns:
        A name of the form ``param_<index zero-padded to 8 digits>``.
    """
    return "param_{:08d}".format(index)


class NoMoreFileException(Exception):
    """Signals that no further files remain to be downloaded."""


class DependencyTaskError(Exception):
    """Signals that a task this one depends on has failed."""


class MinioFileManager:
    """Manage file operations (read, upload, list, delete) in a Minio bucket.

    Connection settings fall back to environment variables when not supplied
    explicitly: ``MINIO_SERVER``, ``MINIO_ACCESS_KEY``, ``MINIO_SECRET_KEY``.
    """

    def __init__(
        self,
        bucket_name: str = "taskfiles",
        server: str | None = None,
        access_key: str | None = None,
        secret_key: str | None = None,
    ):
        """Initialize MinioFileManager.

        Args:
            bucket_name: Name of the Minio bucket to use.
            server: Minio server address; falls back to env var
                ``MINIO_SERVER``, then to ``"server:9001"``.
            access_key: Minio access key; falls back to env var
                ``MINIO_ACCESS_KEY``.
            secret_key: Minio secret key; falls back to env var
                ``MINIO_SECRET_KEY``.
        """
        self.bucket_name = bucket_name
        # Resolve connection settings from the environment when not given
        # explicitly, implementing the fallback behavior documented above.
        self.server = server or os.environ.get("MINIO_SERVER", "server:9001")
        self.access_key = access_key or os.environ.get("MINIO_ACCESS_KEY")
        self.secret_key = secret_key or os.environ.get("MINIO_SECRET_KEY")

        self.client = Minio(
            self.server,
            access_key=self.access_key,
            secret_key=self.secret_key,
            secure=False,  # Use HTTP instead of HTTPS
            cert_check=False,
        )
        # Shared pool for running blocking Minio calls off the event loop.
        self.thread_pool = ThreadPoolExecutor(max_workers=30)
        # Ensure bucket exists
        self._ensure_bucket_exists()

    def _ensure_bucket_exists(self) -> None:
        """Ensure the configured bucket exists, creating it if needed."""
        found = self.client.bucket_exists(self.bucket_name)
        if not found:
            self.client.make_bucket(self.bucket_name)
            logger.debug(f"Created bucket {self.bucket_name}")
        else:
            logger.debug(f"Bucket {self.bucket_name} already exists")

    def read_chunk(self, object_name: str, offset: int, length: int) -> bytes:
        """Read up to *length* bytes of an object starting at *offset*.

        Args:
            object_name: The object name in the Minio bucket.
            offset: Byte offset at which to start reading.
            length: Maximum number of bytes to read.

        Returns:
            The requested bytes. The result is shorter than *length* when the
            read would run past the end of the object, and ``b""`` when
            *offset* is at or beyond the end.
        """
        response = None
        file_size = self.client.stat_object(self.bucket_name, object_name).size
        if offset >= file_size:
            return b""
        # Clamp the request so we never read past the end of the object.
        if offset + length > file_size:
            length = file_size - offset
        try:
            response = self.client.get_object(
                self.bucket_name,
                object_name,
                offset=offset,
                length=length,
            )
            return response.read(length)
        finally:
            # Always return the HTTP connection to the pool, even on error.
            if response:
                response.close()
                response.release_conn()

    def _write(
        self, io_stream: ChunkIO, object_name: str, content_type: str | None = None
    ) -> None:
        """Blocking upload of *io_stream*; intended to run in the thread pool."""
        logger.info(f"Writing file {object_name} to Minio")
        # length=-1 streams the upload in 5 MiB multipart chunks because the
        # total size of a ChunkIO stream is not known up front.
        self.client.put_object(
            self.bucket_name,
            object_name,
            io_stream,
            length=-1,
            content_type=content_type,
            part_size=2**20 * 5,
            num_parallel_uploads=0,
        )

    async def write_file(
        self, io_stream: ChunkIO, object_name: str, content_type: str | None = None
    ):
        """Upload a file to Minio without blocking the event loop.

        Args:
            io_stream: The ChunkIO stream to upload
            object_name: The object name to use in the Minio bucket
            content_type: The content type of the file; defaults to
                ``application/octet-stream`` when not given.

        Raises:
            Exception: If the upload fails
        """
        if content_type is None:
            content_type = "application/octet-stream"

        # Run the blocking upload in the thread pool so the event loop stays
        # free while the transfer is in flight.
        return await asyncio.get_running_loop().run_in_executor(
            self.thread_pool,
            self._write,
            io_stream,
            object_name,
            content_type,
        )

    def list_files(self, prefix: str = "", recursive: bool = True) -> List[str]:
        """List all files with a specific prefix.

        Args:
            prefix: Prefix to filter objects by
            recursive: Whether to recursively list objects

        Returns:
            List of object names

        Raises:
            Exception: If listing fails; the underlying Minio error is
                attached as ``__cause__``.
        """
        try:
            objects = self.client.list_objects(
                self.bucket_name, prefix=prefix, recursive=recursive
            )
            return [obj.object_name for obj in objects]
        except Exception as e:
            # Chain the cause so the original Minio error is not lost.
            raise Exception(
                f"Failed to list files with prefix {prefix}: {str(e)}"
            ) from e

    def delete_prefix(self, prefix: str) -> List:
        """Delete all objects under a specific prefix.

        Args:
            prefix: Prefix to delete objects by

        Returns:
            List of minio ``DeleteError`` entries for objects that could NOT
            be deleted; empty on full success.
        """
        object_names = self.list_files(prefix, recursive=True)
        # remove_objects is lazy: the delete requests are only issued while
        # the returned iterator is consumed, and it yields one DeleteError
        # per FAILED deletion (not one entry per deleted object).
        errors = list(
            self.client.remove_objects(
                self.bucket_name,
                [DeleteObject(name) for name in object_names],
            )
        )
        logger.info(
            f"Requested deletion of {len(object_names)} objects with prefix "
            f"{prefix!r}; {len(errors)} failed"
        )
        if errors:
            logger.warning(f"Delete errors: {errors}")
        return errors
