"""Kafka生产者/消费者（实时数据流转）"""
from fin_senti_entity_platform.utils.constants import KAFKA_TOPICS

"""Kafka生产者/消费者（实时数据流转）

提供与Kafka交互的客户端接口，用于实时数据流的生产和消费。
支持消息发送、消费、主题管理等核心功能。
"""
import json
import threading
from typing import Any, Callable, Dict, List, Optional

from confluent_kafka import Producer, Consumer, KafkaError, KafkaException
from confluent_kafka.admin import AdminClient, NewTopic

from fin_senti_entity_platform.utils.logger import get_logger
from fin_senti_entity_platform.utils.config_loader import ConfigLoader


class KafkaClient:
    """Thread-safe singleton Kafka client.

    Wraps a confluent-kafka ``Producer``, an ``AdminClient``, and a config
    template from which ``Consumer`` instances are created on demand.
    Configuration is read from the project config under
    ``data_collection.kafka``.
    """

    _instance = None
    # A real lock replaces the previous boolean "busy" flag, which was not
    # atomic and could not actually stop two threads from racing through
    # __new__ at the same time.
    _lock = threading.Lock()

    def __new__(cls):
        """Return the singleton instance, creating it on first use.

        Uses double-checked locking: the cheap unlocked test avoids lock
        contention on the common path, and the re-test under the lock makes
        creation race-free.
        """
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        """Initialize producer, consumer template and admin client once.

        Guarded by the class lock and an ``_initialized`` marker so repeated
        ``KafkaClient()`` calls (which all return the singleton) do not
        re-run the expensive setup.
        """
        with self._lock:
            if not hasattr(self, '_initialized'):
                self.config = ConfigLoader().get_config()
                self.logger = get_logger(__name__)
                self._init_producer()
                self._init_consumer()
                self._init_admin()
                self._initialized = True

    def _init_producer(self):
        """Create the shared Kafka producer from configuration.

        Raises:
            Exception: re-raised after logging if construction fails.
        """
        try:
            kafka_config = self.config.get('data_collection', {}).get('kafka', {})
            producer_config = {
                'bootstrap.servers': kafka_config.get('bootstrap_servers', 'localhost:9092'),
                'client.id': kafka_config.get('client_id', 'fin-senti-producer'),
                'acks': kafka_config.get('acks', 'all'),
                'linger.ms': kafka_config.get('linger_ms', 10),
                'batch.size': kafka_config.get('batch_size', 16384),
                'compression.type': kafka_config.get('compression_type', 'gzip')
            }
            self.producer = Producer(producer_config)
            self.logger.info(f"成功初始化Kafka生产者，连接到: {kafka_config.get('bootstrap_servers')}")
        except Exception as e:
            self.logger.error(f"初始化Kafka生产者失败: {str(e)}")
            raise

    def _init_consumer(self):
        """Build the consumer config template (consumers are created lazily).

        Raises:
            Exception: re-raised after logging if the config cannot be built.
        """
        try:
            kafka_config = self.config.get('data_collection', {}).get('kafka', {})
            self.consumer_config_template = {
                'bootstrap.servers': kafka_config.get('bootstrap_servers', 'localhost:9092'),
                'group.id': kafka_config.get('default_group_id', 'fin-senti-consumer-group'),
                'auto.offset.reset': kafka_config.get('auto_offset_reset', 'earliest'),
                'enable.auto.commit': kafka_config.get('enable_auto_commit', True),
                'auto.commit.interval.ms': kafka_config.get('auto_commit_interval_ms', 5000)
            }
            # Registry of created consumers, keyed by group id (see
            # create_consumer / close_all_consumers).
            self.consumers = {}
            self.logger.info("成功初始化Kafka消费者配置模板")
        except Exception as e:
            self.logger.error(f"初始化Kafka消费者配置失败: {str(e)}")
            raise

    def _init_admin(self):
        """Create the Kafka admin client (topic management).

        Raises:
            Exception: re-raised after logging if construction fails.
        """
        try:
            kafka_config = self.config.get('data_collection', {}).get('kafka', {})
            admin_config = {
                'bootstrap.servers': kafka_config.get('bootstrap_servers', 'localhost:9092')
            }
            self.admin_client = AdminClient(admin_config)
            self.logger.info("成功初始化Kafka管理员客户端")
        except Exception as e:
            self.logger.error(f"初始化Kafka管理员客户端失败: {str(e)}")
            raise

    def create_consumer(self, group_id: Optional[str] = None,
                        topics: Optional[List[str]] = None) -> Consumer:
        """Create and return a Kafka consumer instance.

        Args:
            group_id: consumer group id; falls back to the template default.
            topics: topics to subscribe to immediately, if given.

        Returns:
            Consumer: the new consumer (also kept in ``self.consumers``).

        Raises:
            Exception: re-raised after logging if creation fails.
        """
        try:
            consumer_config = self.consumer_config_template.copy()
            if group_id:
                consumer_config['group.id'] = group_id

            consumer = Consumer(consumer_config)

            if topics:
                consumer.subscribe(topics)
                self.logger.info(f"消费者已订阅主题: {topics}，消费者组: {group_id or consumer_config['group.id']}")

            # Track the instance so close_all_consumers() can shut it down.
            # NOTE(review): a second consumer created for the same group id
            # silently replaces (and leaks) the previous entry.
            self.consumers[group_id or consumer_config['group.id']] = consumer

            return consumer
        except Exception as e:
            self.logger.error(f"创建Kafka消费者失败: {str(e)}")
            raise

    def send_message(self, topic: str, message: Any, key: Optional[str] = None,
                     partition: Optional[int] = None) -> bool:
        """Send a message to a Kafka topic, creating the topic if missing.

        Args:
            topic: destination topic.
            message: payload; dicts/lists are JSON-serialized, anything else
                is stringified.
            key: optional message key (UTF-8 encoded).
            partition: optional explicit target partition.

        Returns:
            bool: True if the message was handed to the producer.
        """
        try:
            # Ensure the topic exists before producing.
            if not self.check_topic_exists(topic):
                self.logger.warning(f"主题不存在: {topic}，正在创建...")
                if not self.create_topic(topic):
                    self.logger.error(f"创建主题失败: {topic}")
                    return False

            # Serialize the payload.
            if isinstance(message, (dict, list)):
                message_str = json.dumps(message, ensure_ascii=False)
            else:
                message_str = str(message)

            produce_kwargs = {
                'topic': topic,
                'value': message_str.encode('utf-8'),
                'key': key.encode('utf-8') if key else None,
                'callback': self._delivery_report,
            }
            # Bug fix: confluent-kafka's produce() requires an int partition
            # and raises TypeError on None, so only forward it when the
            # caller explicitly chose a partition.
            if partition is not None:
                produce_kwargs['partition'] = partition

            self.producer.produce(**produce_kwargs)

            # Serve delivery callbacks without blocking (0-second poll).
            self.producer.poll(0)
            return True
        except Exception as e:
            self.logger.error(f"发送消息失败: {str(e)}")
            return False

    def _delivery_report(self, err, msg):
        """Producer delivery callback: log failure or success per message."""
        if err is not None:
            self.logger.error(f"消息发送失败: {err}")
        else:
            self.logger.debug(f"消息发送成功: {msg.topic()}[{msg.partition()}] at offset {msg.offset()}")

    def consume_messages(self, consumer: Consumer, callback: Callable[[Dict], None],
                         max_messages: int = 100, timeout: float = 1.0,
                         max_empty_polls: Optional[int] = None) -> int:
        """Consume messages and pass each decoded payload to ``callback``.

        Args:
            consumer: consumer instance to poll.
            callback: invoked with each JSON-decoded message dict.
            max_messages: stop after this many successfully handled messages.
            timeout: per-poll timeout in seconds.
            max_empty_polls: if set, stop after this many consecutive empty
                polls; the default ``None`` keeps the original behavior of
                polling indefinitely until ``max_messages`` is reached.

        Returns:
            int: number of messages actually handled.
        """
        messages_consumed = 0
        empty_polls = 0
        try:
            while messages_consumed < max_messages:
                msg = consumer.poll(timeout=timeout)

                if msg is None:
                    # Allow callers to bound idle waiting on a quiet topic.
                    empty_polls += 1
                    if max_empty_polls is not None and empty_polls >= max_empty_polls:
                        break
                    continue
                empty_polls = 0

                if msg.error():
                    if msg.error().code() == KafkaError._PARTITION_EOF:
                        self.logger.debug(f"分区消费完毕: {msg.topic()}[{msg.partition()}] reached end at offset {msg.offset()}")
                    else:
                        self.logger.error(f"消费消息错误: {msg.error()}")
                    continue

                try:
                    message_data = json.loads(msg.value().decode('utf-8'))
                    callback(message_data)
                    messages_consumed += 1
                except json.JSONDecodeError as e:
                    # Skip malformed payloads instead of aborting the batch.
                    self.logger.error(f"解析消息失败: {str(e)}")
                    continue

            return messages_consumed
        except Exception as e:
            self.logger.error(f"消费消息过程异常: {str(e)}")
            return messages_consumed

    def create_topic(self, topic_name: str, num_partitions: int = 3,
                     replication_factor: int = 1) -> bool:
        """Create a Kafka topic (no-op if it already exists).

        Args:
            topic_name: topic to create.
            num_partitions: partition count for the new topic.
            replication_factor: replication factor for the new topic.

        Returns:
            bool: True if the topic exists or was created successfully.
        """
        try:
            if self.check_topic_exists(topic_name):
                self.logger.info(f"主题已存在: {topic_name}")
                return True

            new_topic = NewTopic(
                topic=topic_name,
                num_partitions=num_partitions,
                replication_factor=replication_factor
            )

            # create_topics is async and returns {topic: future}.
            fs = self.admin_client.create_topics([new_topic])

            for topic, f in fs.items():
                try:
                    f.result()  # Block until the broker confirms.
                    self.logger.info(f"成功创建主题: {topic}")
                except KafkaException as e:
                    self.logger.error(f"创建主题失败: {topic}, 错误: {e.args[0].str()}")
                    return False

            return True
        except Exception as e:
            self.logger.error(f"创建主题过程异常: {str(e)}")
            return False

    def check_topic_exists(self, topic_name: str) -> bool:
        """Return True if the topic exists in cluster metadata.

        Args:
            topic_name: topic to look up.

        Returns:
            bool: True if present; False if absent or metadata fetch failed.
        """
        try:
            metadata = self.admin_client.list_topics(timeout=10)
            return topic_name in metadata.topics
        except Exception as e:
            self.logger.error(f"检查主题存在性失败: {str(e)}")
            return False

    def list_topics(self) -> List[str]:
        """List all topic names known to the cluster.

        Returns:
            List[str]: topic names; empty list on failure.
        """
        try:
            metadata = self.admin_client.list_topics(timeout=10)
            return list(metadata.topics.keys())
        except Exception as e:
            self.logger.error(f"列出主题失败: {str(e)}")
            return []

    def close_all_consumers(self):
        """Close every consumer created via create_consumer and clear the registry."""
        for group_id, consumer in self.consumers.items():
            try:
                consumer.close()
                self.logger.info(f"关闭消费者组: {group_id}")
            except Exception as e:
                # Best effort: keep closing the remaining consumers.
                self.logger.error(f"关闭消费者失败: {str(e)}")
        self.consumers.clear()

    def flush_producer(self, timeout: float = 5.0) -> int:
        """Flush the producer buffer so queued messages get delivered.

        Args:
            timeout: maximum seconds to wait.

        Returns:
            int: number of messages still undelivered, or -1 on error.
        """
        try:
            return self.producer.flush(timeout=timeout)
        except Exception as e:
            self.logger.error(f"刷新生产者缓冲区失败: {str(e)}")
            return -1


# Module-level singleton instance, created eagerly at import time.
# NOTE(review): importing this module therefore triggers config loading and
# Kafka client construction as a side effect.
kafka_client = KafkaClient()
"""Kafka客户端全局实例"""


# 便捷函数
def init_kafka_topics():
    """Create every Kafka topic the project requires (safe to call repeatedly)."""
    client = KafkaClient()
    topic_names = list(KAFKA_TOPICS.values())
    for name in topic_names:
        client.create_topic(name)