import asyncio
import json
from concurrent.futures import ThreadPoolExecutor
from typing import Dict

from aiokafka import AIOKafkaConsumer, AIOKafkaProducer, TopicPartition
from loguru import logger
from tenacity import retry, stop_after_attempt, wait_fixed

from web.configs.config import settings
from web.services.processing_service import process_pdf_task
from web.utils.redis_utils import RedisClient


class KafkaProducerService:
    """Async wrapper around AIOKafkaProducer that publishes task messages as JSON.

    Lifecycle: construct, ``await start()``, send tasks, ``await stop()``.
    """

    def __init__(self):
        # Messages are serialized to UTF-8 JSON before hitting the wire.
        self.producer = AIOKafkaProducer(
            bootstrap_servers=settings.kafka_bootstrap_servers,
            value_serializer=lambda v: json.dumps(v).encode('utf-8')
        )
        self.redis_client = RedisClient()

    async def start(self):
        """Connect the underlying producer; must be called before send_task."""
        await self.producer.start()

    async def stop(self):
        """Flush pending messages and release the producer's network resources."""
        await self.producer.stop()

    @retry(stop=stop_after_attempt(settings.retry_max_attempts), wait=wait_fixed(settings.retry_wait_fixed))
    async def send_task(self, doc_id: str, callback_url: str = None, topic: str = None):
        """Publish one task message and wait for broker acknowledgement.

        Args:
            doc_id: identifier of the document to process.
            callback_url: optional URL to notify when processing finishes.
            topic: destination topic; defaults to ``settings.pdf_topic``.

        Retries per the tenacity policy configured in settings; the final
        failure propagates to the caller.
        """
        task_data = {
            "doc_id": doc_id,
            "callback_url": callback_url,
            # get_running_loop() is the supported way to reach the loop from
            # inside a coroutine (get_event_loop() here is deprecated usage).
            # NOTE(review): loop.time() is a monotonic clock, not wall-clock —
            # consumers must not interpret this field as an epoch timestamp.
            "timestamp": asyncio.get_running_loop().time()
        }
        topic = topic or settings.pdf_topic
        await self.producer.send_and_wait(topic, task_data)


class KafkaConsumerService:
    """Consumes PDF-processing tasks from Kafka and runs them in a thread pool.

    Offsets are committed manually (``enable_auto_commit=False``) only after a
    task finishes successfully, so unacknowledged messages are redelivered on
    restart. Failed tasks are forwarded to the dead-letter-queue topic.
    """

    def __init__(self):
        self.consumer = AIOKafkaConsumer(
            settings.pdf_topic,
            bootstrap_servers=settings.kafka_bootstrap_servers,
            group_id=settings.consumer_group,
            value_deserializer=lambda m: json.loads(m.decode('utf-8')),
            enable_auto_commit=False,
            # Long session / poll windows: PDF processing can take a long time
            # between polls without the group coordinator evicting us.
            session_timeout_ms=300000,  # 5 minutes
            max_poll_interval_ms=3600000  # 1 hour
        )
        self.redis_client = RedisClient()
        # Thread pool for the blocking, long-running processing work.
        self.executor = ThreadPoolExecutor(max_workers=settings.max_workers)
        # doc_id -> {'task_data', 'message', 'start_time'} for in-flight tasks.
        self.in_progress_tasks = {}
        # Event loop the service was started on. Executor-thread callbacks need
        # it to schedule coroutines back onto the loop thread-safely.
        self._loop = None
        # Background tasks (consume/heartbeat) so stop() can cancel them.
        self._background_tasks = []

    async def start(self):
        """Start the consumer and launch the consume/heartbeat background tasks."""
        self._loop = asyncio.get_running_loop()
        await self.consumer.start()
        self._background_tasks = [
            asyncio.create_task(self.consume()),
            asyncio.create_task(self.heartbeat()),
        ]

    async def stop(self):
        """Cancel background tasks, stop the consumer and shut down the pool."""
        for task in self._background_tasks:
            task.cancel()
        if self._background_tasks:
            # Wait for cancellation to land; swallow the CancelledErrors.
            await asyncio.gather(*self._background_tasks, return_exceptions=True)
        await self.consumer.stop()
        self.executor.shutdown()

    async def heartbeat(self):
        """Periodically log how many tasks are in flight, for liveness visibility.

        NOTE(review): despite the name, this does not send Kafka heartbeats —
        aiokafka manages protocol heartbeats internally. This is purely a
        progress log emitted every 30 seconds while tasks are running.
        """
        while True:
            await asyncio.sleep(30)
            if self.in_progress_tasks:
                logger.debug(f"Heartbeat: {len(self.in_progress_tasks)} tasks in progress")

    async def consume(self):
        """Main loop: read messages, skip already-processed docs, dispatch the rest."""
        logger.info("PDF task consumer started")
        try:
            async for msg in self.consumer:
                task_data = None  # defined up front so the except block can't NameError
                try:
                    task_data = msg.value
                    doc_id = task_data['doc_id']

                    # Skip work a previous run already finished (Redis dedup).
                    if await self.redis_client.is_processed(doc_id):
                        logger.info(f"Skipping already processed doc_id: {doc_id}")
                        await self.consumer.commit()
                        continue

                    # Hand the long-running job off to the thread pool.
                    self.submit_task(task_data, msg)

                except Exception as e:
                    doc_id = task_data.get('doc_id') if isinstance(task_data, dict) else None
                    logger.error(f"Error processing task {doc_id}: {str(e)}")
                    if task_data is not None:
                        await self.handle_failure(task_data)
        finally:
            logger.info("Stopping PDF task consumer")

    def submit_task(self, task_data: Dict, msg):
        """Run process_pdf_task in the thread pool and track it as in-flight."""
        doc_id = task_data['doc_id']
        logger.info(f"Submitting task {doc_id} to thread pool")

        # Metadata kept so the completion callback can commit / fail the message.
        self.in_progress_tasks[doc_id] = {
            'task_data': task_data,
            'message': msg,
            'start_time': asyncio.get_event_loop().time()
        }

        # asyncio.run gives the coroutine its own private loop inside the
        # worker thread, isolated from the service's main loop.
        future = self.executor.submit(asyncio.run, process_pdf_task(task_data))
        future.add_done_callback(lambda f: self.task_completed(doc_id, f))

    def task_completed(self, doc_id: str, future):
        """Completion callback: commit the offset on success, DLQ on failure.

        Runs in an executor worker thread (Future.add_done_callback), where no
        event loop is running — coroutines must therefore be scheduled onto
        the service's loop with run_coroutine_threadsafe, never create_task.
        """
        task_info = self.in_progress_tasks.pop(doc_id, None)
        try:
            future.result()  # re-raises any exception from the worker
            logger.info(f"Task {doc_id} completed successfully")
            if task_info and self._loop:
                asyncio.run_coroutine_threadsafe(
                    self.commit_message(task_info['message']), self._loop)
        except Exception as e:
            logger.error(f"Task {doc_id} failed: {str(e)}")
            if task_info and self._loop:
                asyncio.run_coroutine_threadsafe(
                    self.handle_failure(task_info['task_data']), self._loop)

    async def commit_message(self, msg):
        """Commit the offset for a single consumed message."""
        try:
            # ConsumerRecord exposes topic/partition as separate attributes;
            # commit() wants a TopicPartition key mapped to the offset of the
            # *next* message to read, hence offset + 1.
            tp = TopicPartition(msg.topic, msg.partition)
            await self.consumer.commit({tp: msg.offset + 1})
        except Exception as e:
            logger.error(f"Failed to commit offset: {str(e)}")

    async def handle_failure(self, task_data: dict):
        """Forward a failed task to the dead-letter-queue topic."""
        dlq_producer = KafkaProducerService()
        await dlq_producer.start()
        try:
            await dlq_producer.send_task(
                task_data['doc_id'],
                task_data.get('callback_url'),
                topic=settings.dlq_topic
            )
        finally:
            # Always release the producer, even if the DLQ send itself fails.
            await dlq_producer.stop()