import json
from datetime import datetime
from typing import Dict, Any, Optional

from aiokafka import AIOKafkaProducer, AIOKafkaConsumer
from loguru import logger
from pydantic import BaseModel

from ai_platform.config.settings import settings


class MessageModel(BaseModel):
    """Schema of a file-upload event exchanged over Kafka.

    Produced by ``KafkaService.send_file_upload_message`` and parsed back in
    ``KafkaService.consume_messages``; field names are part of the wire format.
    """

    # Event discriminator; currently always "file_upload" (set by the producer side).
    event_type: str
    # Unique identifier assigned to the uploaded file.
    file_id: str
    # Filename as provided by the uploader.
    original_filename: str
    # URL from which the stored object can be downloaded.
    download_url: str
    # Size of the file in bytes.
    file_size: int
    # MIME type of the file, e.g. "application/pdf".
    content_type: str
    # Upload timestamp; kept as a string (format decided by the caller — TODO confirm ISO-8601).
    upload_time: str
    # MinIO/S3 bucket and object key locating the stored file.
    bucket_name: str
    object_name: str
    # Knowledge base this file belongs to; drives downstream pipeline routing.
    knowledge_base_id: str


class KafkaService:
    """Async Kafka messaging service built on aiokafka.

    Owns at most one shared producer and one shared consumer. The producer is
    created lazily on first send; the consumer must be started explicitly via
    :meth:`start_consumer`. Payloads are JSON-encoded dicts (UTF-8).
    """

    def __init__(self):
        """Read broker settings and initialize lazy producer/consumer handles."""
        self.bootstrap_servers = settings.kafka_bootstrap_servers
        self.client_id = settings.kafka_client_id
        # Both are created on demand; None means "not running".
        self.producer: Optional[AIOKafkaProducer] = None
        self.consumer: Optional[AIOKafkaConsumer] = None

    async def start_producer(self) -> None:
        """Create and start the shared producer.

        Raises:
            Exception: re-raised when the producer fails to start; the handle
                is reset to ``None`` so a later call can retry.
        """
        try:
            self.producer = AIOKafkaProducer(
                bootstrap_servers=self.bootstrap_servers,
                client_id=f"{self.client_id}-producer",
                # Serialize dict payloads as UTF-8 JSON; ensure_ascii=False keeps
                # non-ASCII text (e.g. Chinese filenames) human-readable.
                value_serializer=lambda v: json.dumps(v, ensure_ascii=False).encode('utf-8')
            )
            await self.producer.start()
            logger.info("Kafka生产者启动成功")
        except Exception:
            # A producer that never started must not satisfy the lazy-start
            # guard in send_message(); clear it before propagating.
            self.producer = None
            logger.exception("Kafka生产者启动失败")
            raise

    async def stop_producer(self) -> None:
        """Stop the shared producer if running. Never raises; errors are logged."""
        if self.producer:
            try:
                await self.producer.stop()
                logger.info("Kafka生产者已停止")
            except Exception as e:
                logger.exception(f"停止Kafka生产者失败: {e}")
            finally:
                # Allow a clean restart via the lazy-start path.
                self.producer = None

    async def start_consumer(self, topics: Optional[list] = None) -> None:
        """Create and start the shared consumer.

        Args:
            topics: Topic names to subscribe to; defaults to the configured
                uploads topic.

        Raises:
            Exception: re-raised when the consumer fails to start.
        """
        if topics is None:
            topics = [settings.kafka_topic_uploads]
        try:
            self.consumer = AIOKafkaConsumer(
                *topics,
                bootstrap_servers=self.bootstrap_servers,
                client_id=f"{self.client_id}-consumer",
                # Mirror of the producer's serializer: UTF-8 JSON -> dict.
                value_deserializer=lambda m: json.loads(m.decode('utf-8'))
            )
            await self.consumer.start()
            logger.info(f"Kafka消费者启动成功，订阅主题: {topics}")
        except Exception as e:
            self.consumer = None
            logger.exception(f"Kafka消费者启动失败: {e}")
            raise

    async def stop_consumer(self) -> None:
        """Stop the shared consumer if running. Never raises; errors are logged."""
        if self.consumer:
            try:
                await self.consumer.stop()
                logger.info("Kafka消费者已停止")
            except Exception as e:
                logger.exception(f"停止Kafka消费者失败: {e}")
            finally:
                self.consumer = None

    async def send_message(self, topic: str, message: Dict[str, Any]) -> bool:
        """Publish *message* to *topic*, starting the producer if needed.

        A ``"timestamp"`` field (local-time ISO format) is added to a copy of
        the payload; the caller's dict is not mutated.

        Args:
            topic: Destination topic name.
            message: JSON-serializable payload.

        Returns:
            True only after the broker acknowledged the message; False on any
            send failure (the error is logged, not raised).
        """
        if not self.producer:
            await self.start_producer()

        try:
            # Copy so we do not mutate the caller's dict when stamping it.
            payload = dict(message)
            payload["timestamp"] = datetime.now().isoformat()

            # send() only enqueues into the client buffer; send_and_wait()
            # awaits broker acknowledgement so True actually means delivered.
            await self.producer.send_and_wait(topic, payload)
            logger.info(f"消息发送成功到主题 {topic}: {payload}")
            return True
        except Exception as e:
            logger.exception(f"发送消息失败: {e}")
            return False

    async def send_file_upload_message(self, upload_info: Dict[str, Any]) -> bool:
        """Publish a file-upload event built from *upload_info*.

        Args:
            upload_info: Mapping that must contain every ``MessageModel`` field
                except ``event_type``.

        Returns:
            True if the event was acknowledged by the broker.

        Raises:
            KeyError: if a required key is missing from *upload_info*.
        """
        message = MessageModel(
            event_type="file_upload",
            file_id=upload_info["file_id"],
            original_filename=upload_info["original_filename"],
            download_url=upload_info["download_url"],
            file_size=upload_info["file_size"],
            content_type=upload_info["content_type"],
            upload_time=upload_info["upload_time"],
            bucket_name=upload_info["bucket_name"],
            object_name=upload_info["object_name"],
            knowledge_base_id=upload_info["knowledge_base_id"]
        )

        return await self.send_message(settings.kafka_topic_uploads, message.model_dump())

    async def consume_messages(self, callback=None) -> None:
        """Consume upload events forever, feeding each into the data pipeline.

        Each message is parsed into a ``MessageModel`` and handed to
        ``FileDataPipeline.process_minio_file``; per-message failures are
        logged and consumption continues.

        Args:
            callback: Optional ``async (topic, value)`` hook invoked after a
                message was processed successfully.
        """
        if not self.consumer:
            # Not an exception context: use error(), not exception(), to avoid
            # logging a spurious "NoneType: None" traceback.
            logger.error("消费者未启动")
            return

        try:
            # Imported here to avoid a circular import at module load time —
            # TODO confirm the cycle still exists.
            from ai_platform.pipeline.data_pipeline import FileDataPipeline
            pipeline = FileDataPipeline()
            async for message in self.consumer:
                try:
                    logger.info(f"收到消息: {message.topic}:{message.partition}:{message.offset} -> {message.value}")
                    msg_model = MessageModel(**message.value)
                    await pipeline.process_minio_file(knowledge_base_id=msg_model.knowledge_base_id, object_name=msg_model.object_name,
                                                      bucket_name=msg_model.bucket_name)
                    if callback:
                        await callback(message.topic, message.value)

                except Exception as e:
                    # Keep consuming: one bad message must not stop the loop.
                    logger.exception(f"处理消息失败: {e}")

        except Exception as e:
            logger.exception(f"消费消息失败: {e}")

    async def health_check(self) -> bool:
        """Check broker connectivity with a short-lived throwaway producer.

        Returns:
            True if a producer could start (and was stopped) successfully.
        """
        try:
            test_producer = AIOKafkaProducer(
                bootstrap_servers=self.bootstrap_servers,
                client_id=f"{self.client_id}-health-check"
            )
            try:
                await test_producer.start()
            finally:
                # Release client resources even if start() failed; a stop()
                # error here is still caught by the outer handler.
                await test_producer.stop()
            return True
        except Exception as e:
            logger.exception(f"Kafka健康检查失败: {e}")
            return False
