"""
Filesystem 模块 - S3 实现
"""
import asyncio
from typing import AsyncIterator, List

import fsspec
from botocore.exceptions import ClientError

from axiom_boot.conf.manager import Settings
from axiom_boot.di import autowired, service, conditional_on_setting
from axiom_boot.filesystem.exceptions import FileSystemException, FileNotFoundException
from axiom_boot.filesystem.interfaces import IFileSystem
from axiom_boot.logging.setup import get_logger

# Module-level logger obtained through the project's logging setup, named after this module.
logger = get_logger(__name__)


@service(name="s3filesystem")
@conditional_on_setting(key="filesystem__enabled")
@conditional_on_setting(key="filesystem__provider", expected_value="s3")
class S3FileSystem(IFileSystem):
    """
    一个使用 AWS S3 (或兼容协议，如 MinIO) 作为后端的 IFileSystem 实现。
    """

    def __init__(self, settings: Settings = autowired()):
        self._settings = settings
        s3_config = self._settings.filesystem.s3
        
        try:
            # 关键：确保使用 asynchronous=True 来获取异步版本
            self._fs: fsspec.AbstractFileSystem = fsspec.filesystem(
                "s3",
                asynchronous=True,
                key=s3_config.access_key,
                secret=s3_config.secret_key,
                token=s3_config.session_token,
                client_kwargs={
                    "endpoint_url": s3_config.endpoint_url,
                    "region_name": s3_config.region_name,
                }
            )
            # s3fs 实例，用于执行 aiobotocore 支持的底层操作
            self._s3 = self._fs.fs.s3
        except Exception as e:
            logger.error(f"初始化 S3 fsspec 文件系统时失败: {e}", exc_info=True)
            raise FileSystemException(f"无法初始化 S3 文件系统: {e}")

        self._default_bucket = s3_config.default_bucket
        logger.info(f"S3 文件系统已初始化。默认存储桶: '{self._default_bucket}'。")

    def _get_bucket_name(self, bucket_name: str | None = None) -> str:
        """获取要操作的桶名，如果未提供则使用默认桶。"""
        target_bucket = bucket_name or self._default_bucket
        if not target_bucket:
            raise FileSystemException("未指定 bucket_name 且没有配置默认存储桶。")
        return target_bucket
    
    def _get_full_path(self, path: str, bucket_name: str | None = None) -> str:
        """返回在指定存储桶中的完整路径 (bucket/key)。"""
        return f"{self._get_bucket_name(bucket_name)}/{path.lstrip('/')}"

    # --- 桶/目录级别操作 ---

    async def create_bucket(self, bucket_name: str, **kwargs) -> bool:
        try:
            # aiobotocore 的 create_bucket 是一个 coroutine
            await self._s3.create_bucket(Bucket=bucket_name, **kwargs)
            logger.info(f"S3 存储桶 '{bucket_name}' 已成功创建。")
            return True
        except ClientError as e:
            # 如果桶已存在，也视为成功
            if e.response['Error']['Code'] == 'BucketAlreadyOwnedByYou':
                logger.warning(f"S3 存储桶 '{bucket_name}' 已存在。")
                return True
            logger.error(f"创建 S3 存储桶 '{bucket_name}' 时失败: {e}", exc_info=True)
            raise FileSystemException(f"无法创建 S3 存储桶 '{bucket_name}': {e}")

    async def delete_bucket(self, bucket_name: str, **kwargs) -> bool:
        try:
            await self._s3.delete_bucket(Bucket=bucket_name, **kwargs)
            logger.info(f"S3 存储桶 '{bucket_name}' 已被删除。")
            return True
        except ClientError as e:
            logger.error(f"删除 S3 存储桶 '{bucket_name}' 时失败: {e}", exc_info=True)
            raise FileSystemException(f"无法删除 S3 存储桶 '{bucket_name}': {e}")

    async def bucket_exists(self, bucket_name: str, **kwargs) -> bool:
        try:
            await self._s3.head_bucket(Bucket=bucket_name)
            return True
        except ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            # 其他错误应向上抛出
            raise

    async def list_buckets(self) -> List[str]:
        try:
            response = await self._s3.list_buckets()
            return [bucket['Name'] for bucket in response.get('Buckets', [])]
        except ClientError as e:
            logger.error(f"列出 S3 存储桶时失败: {e}", exc_info=True)
            raise FileSystemException(f"无法列出 S3 存储桶: {e}")

    # --- 文件/对象级别操作 ---

    async def save(self, stream: AsyncIterator[bytes], path: str, bucket_name: str | None = None, **kwargs) -> str:
        full_path = self._get_full_path(path, bucket_name)
        try:
            # fsspec 的 open 方法在异步模式下返回一个异步上下文管理器
            async with self._fs.open(full_path, 'wb', **kwargs) as f:
                async for chunk in stream:
                    await f.write(chunk)
            logger.debug(f"文件已成功上传到 S3: {full_path}")
            return full_path
        except Exception as e:
            logger.error(f"上传文件到 S3 '{full_path}' 时发生错误: {e}", exc_info=True)
            raise FileSystemException(f"无法保存文件 '{path}' 到 S3")

    async def read(self, path: str, bucket_name: str | None = None, chunk_size: int = 8192, **kwargs) -> AsyncIterator[bytes]:
        full_path = self._get_full_path(path, bucket_name)
        
        if not await self.exists(path, bucket_name):
            raise FileNotFoundException(f"文件在 S3 中不存在: '{path}' in bucket '{self._get_bucket_name(bucket_name)}'")
            
        try:
            async with self._fs.open(full_path, 'rb', **kwargs) as f:
                while True:
                    chunk = await f.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk
        except Exception as e:
            logger.error(f"从 S3 读取文件 '{full_path}' 时发生错误: {e}", exc_info=True)
            raise FileSystemException(f"无法从 S3 读取文件 '{path}'")

    async def delete(self, path: str, bucket_name: str | None = None, **kwargs) -> bool:
        if not await self.exists(path, bucket_name):
            logger.warning(f"尝试删除一个不存在的 S3 对象: {path}")
            return False
            
        full_path = self._get_full_path(path, bucket_name)
        try:
            await self._fs.rm(full_path)
            logger.debug(f"S3 对象已成功删除: {full_path}")
            return True
        except Exception as e:
            logger.error(f"删除 S3 对象 '{full_path}' 时发生错误: {e}", exc_info=True)
            raise FileSystemException(f"无法删除 S3 对象 '{path}'")

    async def exists(self, path: str, bucket_name: str | None = None, **kwargs) -> bool:
        full_path = self._get_full_path(path, bucket_name)
        return await self._fs.exists(full_path)

    async def get_presigned_url(self, path: str, bucket_name: str | None = None, expires_in: int = 3600, **kwargs) -> str:
        target_bucket = self._get_bucket_name(bucket_name)
        try:
            loop = asyncio.get_running_loop()
            url = await loop.run_in_executor(
                None,
                lambda: self._fs.url(f"{target_bucket}/{path}", expires=expires_in)
            )
            return url
        except Exception as e:
            logger.error(f"为 S3 对象 '{path}' 生成预签名 URL 时失败: {e}", exc_info=True)
            raise FileSystemException(f"无法为 S3 对象 '{path}' 生成预签名 URL")
