"""
Kafka客户端工具类
提供Kafka连接、测试、数据发送等功能
"""

import asyncio
import json
from datetime import date, datetime
from decimal import Decimal
from typing import Any, Dict, List, Optional, Union
try:
    from kafka import KafkaProducer, KafkaConsumer
    from kafka.errors import KafkaError, KafkaTimeoutError
except ImportError:
    # kafka-python is not installed: provide inert stand-ins so this module
    # still imports. All operations become no-ops; note that send() returns
    # None, so callers that use the returned future will fail at runtime.
    class KafkaProducer:
        """No-op stand-in for kafka.KafkaProducer."""

        def __init__(self, *args, **kwargs):
            pass

        def send(self, *args, **kwargs):
            pass

        def close(self):
            pass

        def list_topics(self, *args, **kwargs):
            return {}

        def flush(self):
            pass

    class KafkaConsumer:
        """No-op stand-in for kafka.KafkaConsumer."""

        def __init__(self, *args, **kwargs):
            pass

    class KafkaError(Exception):
        """Stand-in for kafka.errors.KafkaError."""

    class KafkaTimeoutError(KafkaError):
        """Stand-in for kafka.errors.KafkaTimeoutError.

        Subclasses KafkaError to mirror the real kafka.errors hierarchy, so
        `except KafkaError` also catches timeouts, just like with the real
        library (fixes the previous stand-in, which subclassed Exception
        directly).
        """
import logging

# NOTE(review): `settings` is not referenced anywhere in this module —
# confirm it is unused before removing the import.
from app.core.config import settings

# Module-level logger named after this module.
logger = logging.getLogger(__name__)


def json_serializer(obj):
    """JSON serializer hook for objects the stdlib encoder can't handle.

    Supports datetime (formatted as 'YYYY-MM-DD HH:MM:SS'), date
    (formatted as 'YYYY-MM-DD'), and Decimal (stringified losslessly).

    Args:
        obj: the object json.dumps could not serialize.

    Returns:
        A JSON-compatible string representation of obj.

    Raises:
        TypeError: for any other unsupported type, mirroring json's contract.
    """
    # datetime must be checked before date: datetime is a date subclass.
    if isinstance(obj, datetime):
        return obj.strftime('%Y-%m-%d %H:%M:%S')
    if isinstance(obj, date):
        return obj.strftime('%Y-%m-%d')
    if isinstance(obj, Decimal):
        return str(obj)
    # Use the class name (not repr of the type) to match json's own message.
    raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")


def safe_json_dumps(data, **kwargs):
    """Serialize *data* to a JSON string without ASCII-escaping.

    Objects the stdlib encoder can't handle (datetime, Decimal) are
    delegated to json_serializer; any extra keyword arguments are forwarded
    verbatim to json.dumps.
    """
    return json.dumps(
        data,
        default=json_serializer,
        ensure_ascii=False,
        **kwargs,
    )


class KafkaClient:
    """Kafka client wrapper around a single, lazily created KafkaProducer.

    Provides async helpers for connection testing, single/batch message
    sending, and topic metadata lookup. Usable as a synchronous context
    manager; the producer is closed on exit.
    """

    def __init__(self, bootstrap_servers: str, **config):
        """
        Initialize the Kafka client.

        Args:
            bootstrap_servers: Kafka broker address(es), e.g. "host:9092"
            **config: Additional Kafka producer options; merged last, so
                they can override any of the defaults below
        """
        self.bootstrap_servers = bootstrap_servers
        self.config = config
        # Created lazily by get_producer(); None until first use.
        self.producer = None
        self._producer_config = {
            'bootstrap_servers': bootstrap_servers,
            # Values are JSON-encoded (datetime/Decimal aware); keys stringified.
            'value_serializer': lambda v: safe_json_dumps(v).encode('utf-8'),
            'key_serializer': lambda k: str(k).encode('utf-8') if k else None,
            'acks': 'all',  # wait for all in-sync replicas to acknowledge
            'retries': 3,   # retries on transient send failures
            'batch_size': 16384,  # max batch size in bytes
            'linger_ms': 10,      # wait up to 10 ms to fill a batch
            'buffer_memory': 33554432,  # total producer buffer (32 MiB)
            **config
        }

    def get_producer(self) -> KafkaProducer:
        """Return the shared producer, creating it on first call.

        Raises:
            Exception: whatever KafkaProducer construction raised
                (re-raised after logging).
        """
        if self.producer is None:
            try:
                self.producer = KafkaProducer(**self._producer_config)
                logger.info(f"Kafka生产者连接成功: {self.bootstrap_servers}")
            except Exception as e:
                logger.error(f"创建Kafka生产者失败: {e}")
                raise
        return self.producer

    def close_producer(self):
        """Close the shared producer (if any); errors are logged, not raised."""
        if self.producer:
            try:
                self.producer.close()
                # NOTE(review): if close() raises, self.producer is never
                # reset to None, so the broken instance would be reused.
                self.producer = None
                logger.info("Kafka生产者已关闭")
            except Exception as e:
                logger.error(f"关闭Kafka生产者失败: {e}")

    async def test_connection(self, timeout: int = 10) -> Dict[str, Any]:
        """
        Test connectivity to the Kafka cluster.

        Builds a throwaway producer in the default thread-pool executor (so
        the event loop is not blocked), fetches cluster metadata, then
        closes the producer.

        Args:
            timeout: timeout in seconds (applied as request_timeout_ms)

        Returns:
            On success: {'success': True, 'message', 'response_time' (ms),
            'cluster_info'}; on failure: {'success': False, 'message',
            'response_time', 'error_type'[, 'error_details']}.
        """
        start_time = asyncio.get_event_loop().time()

        try:
            # Run the blocking constructor off the event loop.
            loop = asyncio.get_event_loop()
            producer = await loop.run_in_executor(
                None,
                lambda: KafkaProducer(
                    bootstrap_servers=self.bootstrap_servers,
                    request_timeout_ms=timeout * 1000,
                    **self.config
                )
            )

            # NOTE(review): kafka-python's KafkaProducer has no public
            # list_topics() method (that is the confluent-kafka API). With the
            # real library this raises AttributeError and is reported below as
            # 'connection_error'; only the ImportError fallback stub at the
            # top of this module defines list_topics(). Confirm which client
            # library is intended.
            metadata = await loop.run_in_executor(None, lambda: producer.list_topics())

            # Close the temporary producer.
            await loop.run_in_executor(None, producer.close)

            end_time = asyncio.get_event_loop().time()
            response_time = int((end_time - start_time) * 1000)

            return {
                'success': True,
                'message': '连接成功',
                'response_time': response_time,
                'cluster_info': {
                    'topics_count': len(metadata),
                    'bootstrap_servers': self.bootstrap_servers
                }
            }

        except KafkaTimeoutError:
            return {
                'success': False,
                'message': '连接超时',
                'response_time': timeout * 1000,
                'error_type': 'timeout'
            }
        except KafkaError as e:
            return {
                'success': False,
                'message': f'Kafka错误: {str(e)}',
                'response_time': int((asyncio.get_event_loop().time() - start_time) * 1000),
                'error_type': 'kafka_error',
                'error_details': str(e)
            }
        except Exception as e:
            return {
                'success': False,
                'message': f'连接失败: {str(e)}',
                'response_time': int((asyncio.get_event_loop().time() - start_time) * 1000),
                'error_type': 'connection_error',
                'error_details': str(e)
            }

    async def send_message(
        self,
        topic: str,
        message: Union[Dict, List, str],
        key: Optional[str] = None,
        partition: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        Send a single message to Kafka.

        Args:
            topic: topic name
            message: message payload (JSON-serialized by the producer config)
            key: optional message key
            partition: optional explicit partition number

        Returns:
            On success: {'success': True, 'topic', 'partition', 'offset',
            'timestamp'}; on failure: {'success': False, 'error'}.
        """
        try:
            producer = self.get_producer()

            # send() returns a future for the broker acknowledgement.
            future = producer.send(
                topic=topic,
                value=message,
                key=key,
                partition=partition
            )

            # NOTE(review): future.get() is a blocking call and can stall the
            # event loop for up to 10 s inside this async method; consider
            # wrapping it in run_in_executor.
            record_metadata = future.get(timeout=10)

            return {
                'success': True,
                'topic': record_metadata.topic,
                'partition': record_metadata.partition,
                'offset': record_metadata.offset,
                'timestamp': record_metadata.timestamp
            }

        except Exception as e:
            logger.error(f"发送消息到Kafka失败: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    async def send_batch_messages(
        self,
        topic: str,
        messages: List[Dict[str, Any]],
        batch_size: int = 1000
    ) -> Dict[str, Any]:
        """
        Send messages to Kafka in batches.

        Args:
            topic: topic name
            messages: list of message dicts, each holding 'value' and
                optionally 'key' / 'partition'
            batch_size: number of messages queued before waiting/flushing

        Returns:
            Summary dict with 'success', 'total_messages', 'success_count',
            'failed_count', and up to 10 error strings in 'errors'.
        """
        try:
            producer = self.get_producer()
            total_messages = len(messages)
            success_count = 0
            failed_count = 0
            errors = []

            # Process the messages batch by batch.
            for i in range(0, total_messages, batch_size):
                batch = messages[i:i + batch_size]
                batch_futures = []

                # Queue the current batch.
                for msg in batch:
                    try:
                        future = producer.send(
                            topic=topic,
                            value=msg.get('value'),
                            key=msg.get('key'),
                            partition=msg.get('partition')
                        )
                        batch_futures.append(future)
                    except Exception as e:
                        failed_count += 1
                        errors.append(f"发送消息失败: {str(e)}")

                # Wait for the current batch to be acknowledged.
                # NOTE(review): future.get() blocks the event loop (up to
                # 10 s per message); consider offloading to an executor.
                for future in batch_futures:
                    try:
                        future.get(timeout=10)
                        success_count += 1
                    except Exception as e:
                        failed_count += 1
                        errors.append(f"消息发送超时或失败: {str(e)}")

                # Flush the producer's buffer between batches.
                producer.flush()

            return {
                'success': failed_count == 0,
                'total_messages': total_messages,
                'success_count': success_count,
                'failed_count': failed_count,
                'errors': errors[:10]  # return at most the first 10 errors
            }

        except Exception as e:
            logger.error(f"批量发送消息到Kafka失败: {e}")
            return {
                'success': False,
                'total_messages': len(messages),
                'success_count': 0,
                'failed_count': len(messages),
                'error': str(e)
            }

    async def get_topic_info(self, topic: str) -> Dict[str, Any]:
        """
        Fetch metadata for a topic.

        Args:
            topic: topic name

        Returns:
            On success: {'success': True, 'topic', 'partitions',
            'partition_info'}; otherwise a dict with 'success': False and
            'message' (topic missing) or 'error' (exception text).
        """
        try:
            loop = asyncio.get_event_loop()
            producer = self.get_producer()

            # NOTE(review): as in test_connection, list_topics() is not part
            # of the kafka-python producer API, and the partition fields read
            # below (partition/leader/replicas/isr) assume confluent-style
            # metadata objects — confirm the intended client library.
            metadata = await loop.run_in_executor(
                None,
                lambda: producer.list_topics(topic, timeout=10)
            )

            if topic in metadata:
                topic_metadata = metadata[topic]
                return {
                    'success': True,
                    'topic': topic,
                    'partitions': len(topic_metadata.partitions),
                    'partition_info': [
                        {
                            'partition': p.partition,
                            'leader': p.leader,
                            'replicas': p.replicas,
                            'isr': p.isr
                        }
                        for p in topic_metadata.partitions.values()
                    ]
                }
            else:
                return {
                    'success': False,
                    'message': f'主题 {topic} 不存在'
                }

        except Exception as e:
            logger.error(f"获取主题信息失败: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    def __enter__(self):
        """Context-manager entry: returns self."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context-manager exit: closes the shared producer."""
        self.close_producer()


class KafkaConnectionPool:
    """Simple keyed pool of idle KafkaClient instances.

    Clients are pooled per (bootstrap_servers, config) combination.
    `max_connections` caps only the number of *idle* clients retained per
    key; it does not bound how many clients can be handed out concurrently.
    """

    def __init__(self, max_connections: int = 10):
        """
        Initialize the connection pool.

        Args:
            max_connections: Maximum number of idle clients retained per
                connection key; surplus returned clients are closed.
        """
        self.max_connections = max_connections
        # Maps connection key -> list of idle clients for that key.
        self._connections: Dict[str, List["KafkaClient"]] = {}
        self._lock = asyncio.Lock()

    @staticmethod
    def _make_key(bootstrap_servers: str, **config) -> str:
        """Build a deterministic pool key for a server/config combination.

        Uses a sorted repr of the config items rather than
        hash(frozenset(config.items())), so unhashable config values
        (lists, dicts, ...) do not raise TypeError and equal configs always
        map to the same key.
        """
        return f"{bootstrap_servers}:{sorted((k, repr(v)) for k, v in config.items())}"

    async def get_client(self, bootstrap_servers: str, **config) -> "KafkaClient":
        """
        Get a Kafka client from the pool, creating one if none is idle.

        Args:
            bootstrap_servers: Kafka broker address(es)
            **config: Additional client configuration options

        Returns:
            A KafkaClient instance (reused from the pool when possible).
        """
        connection_key = self._make_key(bootstrap_servers, **config)

        async with self._lock:
            pool = self._connections.setdefault(connection_key, [])
            # Reuse an idle client if one is available.
            if pool:
                return pool.pop()

        # No idle client: build a fresh one (no need to hold the lock).
        return KafkaClient(bootstrap_servers, **config)

    async def return_client(self, client: "KafkaClient", bootstrap_servers: str, **config):
        """
        Return a client to the pool.

        If the per-key idle pool is already full, the client's producer is
        closed instead of being retained.

        Args:
            client: the KafkaClient to give back
            bootstrap_servers: Kafka broker address(es) it was created with
            **config: the same configuration options it was created with
        """
        connection_key = self._make_key(bootstrap_servers, **config)

        async with self._lock:
            pool = self._connections.setdefault(connection_key, [])
            if len(pool) < self.max_connections:
                # Room left: keep the client for reuse.
                pool.append(client)
            else:
                # Pool is full: dispose of the connection.
                client.close_producer()

    async def close_all(self):
        """Close every pooled client and empty the pool."""
        async with self._lock:
            for pool in self._connections.values():
                for client in pool:
                    client.close_producer()
            self._connections.clear()


# Global connection-pool instance shared across the application
kafka_pool = KafkaConnectionPool()
