from typing import Optional, List
from fastapi import UploadFile, BackgroundTasks
from app.core.storage.factory import StorageFactory
from app.models.file_model import FileModel, StorageProviderModel
from app.schemas.common import PaginationParams, PaginationResult
from app.schemas.admin.admin_file_manage_schemas import (
    FileDetailResponse,
    FileListFilter,
    FileMigrateRequest,
    FileStatusUpdateRequest,
)
from app.core.exceptions import APIException
from app.core.constants import (
    FileStatusEnum,
    MAX_FILE_SIZE,
    ALLOWED_FILE_TYPES,
    CHUNK_SIZE,
    MAX_BATCH_SIZE,
)
from app.core.i18n import t
from logging import getLogger
from app.services.base import filter_query

logger = getLogger(__name__)


class FileManageService:
    """File management service: paginated listing, upload (with MD5 dedup and
    background multipart upload for large files), deletion, cross-provider
    migration and batch status updates."""

    async def get_file_list(
        self, pagination: PaginationParams, filters: FileListFilter
    ) -> PaginationResult[FileDetailResponse]:
        """Return a filtered, paginated page of files as detail responses.

        Args:
            pagination: page/size parameters (``get_skip`` yields the offset).
            filters: filter fields applied via ``filter_query``.
        """
        # Fields that should match with a case-insensitive "contains" lookup.
        # NOTE(review): username/email/phone look like uploader attributes copied
        # from a user-management service — confirm FileListFilter/FileModel
        # actually expose these fields.
        special_filter_conditions = {
            "username": "username__icontains",
            "email": "email__icontains",
            "phone": "phone__icontains",
        }
        # Prefetch relations up front to avoid N+1 queries during serialization.
        query = FileModel.all().prefetch_related("provider", "uploader")
        query = await filter_query(query, filters, special_filter_conditions)

        # Total count plus the requested page slice.
        total = await query.count()
        items = await query.offset(pagination.get_skip()).limit(pagination.page_size).all()

        response_items = [await self._to_detail_response(item) for item in items]
        return PaginationResult(total=total, items=response_items)

    async def get_file_detail(self, file_id: int) -> FileDetailResponse:
        """Return details for one file.

        Raises:
            APIException: if no file with ``file_id`` exists.
        """
        # Prefetch relations so _to_detail_response does not re-query.
        file = await FileModel.get_or_none(id=file_id).prefetch_related("provider", "uploader")
        if not file:
            raise APIException(message=t("error.file.not_found"))

        return await self._to_detail_response(file)

    async def upload_file(
        self, file: UploadFile, user_id: int, background_tasks: BackgroundTasks, provider_id: Optional[int] = None
    ) -> FileDetailResponse:
        """Validate and store an uploaded file.

        Content identical to an existing file (same MD5) is deduplicated by
        returning the existing record. Files larger than CHUNK_SIZE are
        recorded with status UPLOADING and streamed to storage in a background
        task; smaller files are saved synchronously as COMPLETED.

        Raises:
            APIException: on invalid type, size limit, or unknown provider.
        """
        # Reject disallowed MIME types early.
        if file.content_type not in ALLOWED_FILE_TYPES:
            raise APIException(message=t("error.file.invalid_type"))

        # NOTE(review): UploadFile.size may be None when the client sends no
        # Content-Length — confirm upstream guarantees it before relying on this.
        if file.size > MAX_FILE_SIZE:
            raise APIException(message=t("error.file.size_exceeded"))

        # Resolve the storage backend for the requested (or default) provider.
        storage = await StorageFactory.get_storage(provider_id)

        # Content-hash dedup: identical bytes return the existing record as-is.
        md5 = await storage.calculate_md5(file)
        existing_file = await FileModel.filter(md5=md5).first().prefetch_related("provider", "uploader")
        if existing_file:
            return await self._to_detail_response(existing_file)

        provider = await StorageProviderModel.get_or_none(id=provider_id if provider_id else storage.provider_id)
        if not provider:
            raise APIException(message=t("error.file.provider_not_found"))

        if file.size > CHUNK_SIZE:
            # Large upload: persist an UPLOADING placeholder first so the caller
            # gets a record id immediately; the bytes are streamed afterwards.
            file_model = await FileModel.create(
                filename=file.filename,
                provider=provider,
                size=file.size,
                content_type=file.content_type,
                md5=md5,
                uploader_id=user_id,
                status=FileStatusEnum.UPLOADING,
            )
            await file_model.fetch_related("provider", "uploader")

            # Actual transfer happens after the response is sent.
            background_tasks.add_task(self._upload_large_file, file, file_model.id, storage)

            return await self._to_detail_response(file_model)

        # Small file: save synchronously and record as COMPLETED.
        storage_path = await storage.save(file, file.filename)

        file_model = await FileModel.create(
            filename=file.filename,
            storage_path=storage_path,
            provider=provider,
            size=file.size,
            content_type=file.content_type,
            md5=md5,
            uploader_id=user_id,
            status=FileStatusEnum.COMPLETED,
        )
        await file_model.fetch_related("provider", "uploader")

        return await self._to_detail_response(file_model)

    async def delete_file(self, file_id: int) -> None:
        """Delete one file from its storage backend and the database.

        Raises:
            APIException: if the file is missing or the backend delete fails.
        """
        file = await FileModel.get_or_none(id=file_id)
        if not file:
            raise APIException(message=t("error.file.not_found"))

        storage = await StorageFactory.get_storage(file.provider_id)

        # Remove the bytes first; keep the DB row if the backend delete fails
        # so the file is not orphaned in storage.
        if not await storage.delete(file.storage_path):
            raise APIException(message=t("error.file.delete_failed"))

        await file.delete()

    async def batch_delete_files(self, file_ids: List[int]) -> None:
        """Delete several files, grouped by provider to reuse storage clients.

        Raises:
            APIException: if the batch is too large, no file matches, or a
                backend delete fails (remaining files are left untouched).
        """
        if len(file_ids) > MAX_BATCH_SIZE:
            raise APIException(message=t("error.file.batch_limit_exceeded"))

        files = await FileModel.filter(id__in=file_ids).prefetch_related("provider")
        if not files:
            raise APIException(message=t("error.file.not_found"))

        # Group by provider so each storage backend is instantiated once.
        # (Fixed: the original rebound the grouping dict's own name inside the
        # loop over its items, shadowing it mid-iteration.)
        files_by_provider = {}
        for file in files:
            files_by_provider.setdefault(file.provider_id, []).append(file)

        for provider_id, provider_file_list in files_by_provider.items():
            storage = await StorageFactory.get_storage(provider_id)
            for file in provider_file_list:
                # Abort the whole batch on the first backend failure.
                if not await storage.delete(file.storage_path):
                    raise APIException(message=t("error.file.delete_failed"))
                await file.delete()

    async def migrate_files(self, request: FileMigrateRequest) -> None:
        """Copy files to the target provider, then delete the source copies.

        Files whose source bytes cannot be read are logged and skipped.

        Raises:
            APIException: if the batch is too large, the target provider is
                unknown, or no file matches.
        """
        if len(request.file_ids) > MAX_BATCH_SIZE:
            raise APIException(message=t("error.file.batch_limit_exceeded"))

        target_provider = await StorageProviderModel.get_or_none(id=request.target_provider_id)
        if not target_provider:
            raise APIException(message=t("error.file.provider_not_found"))

        files = await FileModel.filter(id__in=request.file_ids).prefetch_related("provider")
        if not files:
            raise APIException(message=t("error.file.not_found"))

        target_storage = await StorageFactory.get_storage(target_provider.id)

        for file in files:
            source_storage = await StorageFactory.get_storage(file.provider_id)

            # Remember the source path now: the record's storage_path is
            # overwritten below, and the source-side delete must use the
            # ORIGINAL path (the original code deleted the new path instead).
            source_path = file.storage_path

            source_file = await source_storage.get(source_path)
            if not source_file:
                logger.error(f"文件{file.id}获取失败，跳过迁移")
                continue

            target_path = await target_storage.save(source_file, file.filename)

            # Persist the new location. (Fixed: Tortoise model instances have
            # no async update(); assign and save instead.)
            file.provider = target_provider
            file.storage_path = target_path
            await file.save(update_fields=["provider_id", "storage_path"])

            # Best-effort cleanup of the source copy.
            await source_storage.delete(source_path)

    async def update_file_status(self, request: FileStatusUpdateRequest) -> None:
        """Bulk-update the status of the requested files.

        Raises:
            APIException: if the batch is too large or no file matched.
        """
        if len(request.file_ids) > MAX_BATCH_SIZE:
            raise APIException(message=t("error.file.batch_limit_exceeded"))

        count = await FileModel.filter(id__in=request.file_ids).update(status=request.status)
        if count == 0:
            raise APIException(message=t("error.file.not_found"))

    async def _to_detail_response(self, file: FileModel) -> FileDetailResponse:
        """Build the API response for one file, including its access URL."""
        storage = await StorageFactory.get_storage(file.provider_id)
        url = await storage.get_url(file.storage_path) if file.storage_path else None

        # NOTE(review): nothing visible here ever sets "_fetched", so this
        # guard likely always re-fetches — confirm against the base model/ORM.
        if not hasattr(file, "_fetched"):
            await file.fetch_related("provider", "uploader")

        return FileDetailResponse(
            id=file.id,
            filename=file.filename,
            size=file.size,
            content_type=file.content_type,
            url=url,
            created_at=file.created_at,
            provider_id=file.provider_id,
            provider_name=file.provider.name if file.provider else None,
            storage_path=file.storage_path,
            md5=file.md5,
            status=file.status,
            uploader_id=file.uploader_id,
            uploader_name=file.uploader.username if file.uploader else None,
        )

    async def _upload_large_file(self, file: UploadFile, file_id: int, storage: "BaseStorage") -> None:  # type: ignore
        """Background task: stream a large upload to storage in parts.

        On success marks the record COMPLETED with its storage path; on any
        failure marks it FAILED and aborts the multipart upload.
        """
        file_model = await FileModel.get(id=file_id)
        upload_id = None

        try:
            upload_id = await storage.init_multipart_upload(file.filename, file.content_type)

            parts = []
            part_number = 1

            # Read and push fixed-size chunks until the stream is exhausted.
            while chunk := await file.read(CHUNK_SIZE):
                etag = await storage.upload_part(upload_id, part_number, chunk)
                parts.append({"PartNumber": part_number, "ETag": etag})
                part_number += 1

            storage_path = await storage.complete_multipart_upload(upload_id, parts)

            # Persist success. (Fixed: Tortoise model instances have no async
            # update(); assign and save instead.)
            file_model.storage_path = storage_path
            file_model.status = FileStatusEnum.COMPLETED
            await file_model.save(update_fields=["storage_path", "status"])

        except Exception as e:
            logger.error("大文件上传失败: %s", e)
            # Record the failure so the placeholder row is not stuck UPLOADING.
            file_model.status = FileStatusEnum.FAILED
            await file_model.save(update_fields=["status"])
            # Release any partially uploaded parts on the backend.
            if upload_id:
                await storage.abort_multipart_upload(upload_id)
            # NOTE(review): this runs after the response is sent, so the raised
            # APIException only surfaces in server logs, never to the client.
            raise APIException(message=t("error.file.upload_failed"))
