from abc import ABC, abstractmethod
import sys
import time
from typing import List
from common.utils import logger
from kafka import KafkaConsumer
from kafka.errors import KafkaError
from kafka.structs import OffsetAndMetadata
from kafka import TopicPartition
import traceback

from pydantic import BaseModel, Field

class KafkaConsumerConf(BaseModel):
    """Connection settings for one Kafka consumer group on one topic."""

    # NOTE: the previous `Field(alias=...)` declarations duplicated each
    # field's own name, which is a no-op in pydantic — plain fields accept
    # exactly the same input keys, so the aliases were removed.
    bootstrap_servers: List[str]  # e.g. ["host1:9092", "host2:9092"]
    topic: str                    # topic to subscribe to
    group_id: str                 # consumer group id for offset tracking
    
class KafkaMsgConsumer(ABC):
    """Abstract handler invoked once for each Kafka record received."""

    @abstractmethod
    def consume(self, message) -> None:
        """Process a single Kafka message record.

        Subclasses must override this; the base implementation only raises.
        """
        raise NotImplementedError

class KafkaConsumerService:
    def __init__(self, kadka_consumer_conf: KafkaConsumerConf, consumer: KafkaMsgConsumer):
        self._bootstrap_servers = kadka_consumer_conf.bootstrap_servers
        self._topic = kadka_consumer_conf.topic
        self._group_id = kadka_consumer_conf.group_id
        
        self.logger = logger.my_logger()
        
        self.consumer = consumer

    def process_message(self, message):
        try:
            # 实际业务处理逻辑
            self.logger.info(f"【KafkaConsumerService】Processing message: {message.value.decode('utf-8')}")
            self.consumer.consume(message)
            
        except Exception as e:
            self.logger.error(f"【KafkaConsumerService】Error processing message: {e}\n{traceback.format_exc()}")
            # 可根据需要添加重试逻辑

    def run_consumer(self):
        print("【KafkaConsumerService】runing...", file=sys.stderr)
        
        # 配置参数（根据实际最长任务时间调整）
        max_poll_interval = 3600000  # 1小时（大于最长处理时间）
        session_timeout = 60000      # 1分钟（需小于broker的group.max.session.timeout.ms）
        heartbeat_interval = 20000   # 20秒（约为session_timeout的1/3）

        while True:
            consumer = None
            try:
                consumer = KafkaConsumer(
                    self._topic,
                    bootstrap_servers=self._bootstrap_servers,
                    auto_offset_reset='latest',
                    enable_auto_commit=False,
                    group_id=self._group_id,
                    max_poll_interval_ms=max_poll_interval,
                    session_timeout_ms=session_timeout,
                    heartbeat_interval_ms=heartbeat_interval
                )
                
                self.logger.info(f"【KafkaConsumerService】Started Kafka consumer for topic: {self._topic}")
                
                for message in consumer:
                    topic_part = TopicPartition(topic=message.topic, partition=message.partition)
                    offsets = {
                        topic_part: OffsetAndMetadata(
                            offset=message.offset + 1,
                            metadata=None,
                            leader_epoch=None
                        )
                    }
                    try:
                        self.process_message(message)
                    except Exception as e:
                        self.logger.error(f"【KafkaConsumerService】Unexpected error in message loop: {e}")
                        time.sleep(5)  # 错误后暂停避免快速循环

            except KafkaError as e:
                self.logger.error(f"【KafkaConsumerService】Kafka connection error: {e}")
                time.sleep(10)  # 连接错误后重试间隔
            except Exception as e:
                self.logger.critical(f"【KafkaConsumerService】Critical error: {e}\n{traceback.format_exc()}")
                raise  # 严重错误直接抛出
            finally:
                if consumer:
                    try:
                        consumer.close()
                    except:
                        pass
