"""
Kafka服务层
"""
import json
import time
from typing import Dict, List, Optional, Any
from datetime import datetime
from kafka import KafkaProducer, KafkaConsumer, KafkaAdminClient
from kafka.admin import ConfigResource, ConfigResourceType
from kafka.errors import KafkaError, KafkaTimeoutError, NoBrokersAvailable
from kafka.structs import TopicPartition

from backend.models.kafka_models import (
    KafkaConnectionConfig, TopicInfo, PartitionInfo, 
    MessageRequest, MessageResponse, ConnectionStatus
)
from backend.utils.exceptions import (
    KafkaConnectionException, KafkaTopicException, 
    KafkaMessageException, KafkaAuthenticationException
)
from backend.utils.logger import log_manager

class KafkaService:
    """Stateful facade over kafka-python producer/admin/consumer clients.

    Holds at most one active connection (a producer plus an admin client) at
    a time and exposes connection management, topic inspection and message
    publishing. Every operation logs through ``log_manager`` and translates
    kafka-python errors into the project's ``Kafka*Exception`` hierarchy.
    """

    def __init__(self):
        # Clients are created lazily by connect(); all None/False while disconnected.
        self.producer: Optional[KafkaProducer] = None
        self.admin_client: Optional[KafkaAdminClient] = None
        self.connection_config: Optional[KafkaConnectionConfig] = None
        self.connected: bool = False
        self.connected_at: Optional[datetime] = None
        self.last_error: Optional[str] = None

    def _get_kafka_config(self, config: KafkaConnectionConfig) -> Dict[str, Any]:
        """Build the keyword arguments shared by every kafka-python client.

        Args:
            config: connection settings supplied by the caller.

        Returns:
            A dict of kwargs for KafkaProducer / KafkaConsumer / KafkaAdminClient.

        Raises:
            KafkaAuthenticationException: a SASL protocol was selected but no
                SASL mechanism was provided.
        """
        kafka_config = {
            'bootstrap_servers': config.bootstrap_servers.split(','),
            'security_protocol': config.security_protocol,
            'api_version': (0, 10, 1),  # pinned low for compatibility with older brokers
            'request_timeout_ms': 30000,
            'connections_max_idle_ms': 540000,
        }

        # SASL credentials only apply to the SASL_* security protocols.
        if config.security_protocol in ('SASL_PLAINTEXT', 'SASL_SSL'):
            if not config.sasl_mechanism:
                raise KafkaAuthenticationException("SASL认证需要指定认证机制")

            kafka_config.update({
                'sasl_mechanism': config.sasl_mechanism,
                'sasl_plain_username': config.sasl_plain_username,
                'sasl_plain_password': config.sasl_plain_password,
            })

        return kafka_config

    def test_connection(self, config: KafkaConnectionConfig) -> ConnectionStatus:
        """Probe the cluster with a throwaway admin client.

        Never raises: every failure is logged and reported through the
        returned ConnectionStatus instead.
        """
        test_admin = None  # explicit sentinel instead of the old `in locals()` hack
        try:
            kafka_config = self._get_kafka_config(config)
            test_admin = KafkaAdminClient(**kafka_config)

            # Successfully fetching cluster metadata proves the connection works.
            if test_admin.describe_cluster():
                log_manager.log_connection_test(config.bootstrap_servers, True)
                return ConnectionStatus(
                    connected=True,
                    bootstrap_servers=config.bootstrap_servers,
                    connected_at=datetime.now()
                )
            error_msg = "无法获取集群元数据"
        except NoBrokersAvailable:
            error_msg = f"无法连接到Kafka服务器: {config.bootstrap_servers}"
        except KafkaError as e:
            error_msg = f"Kafka连接错误: {str(e)}"
        except Exception as e:
            error_msg = f"连接失败: {str(e)}"
        finally:
            # Always dispose of the temporary client, even on failure paths.
            if test_admin is not None:
                test_admin.close()

        log_manager.log_connection_test(config.bootstrap_servers, False, error_msg)
        return ConnectionStatus(connected=False, error=error_msg)

    def connect(self, config: KafkaConnectionConfig) -> ConnectionStatus:
        """Connect to the cluster, creating the shared producer and admin client.

        A failed pre-flight test is returned as a ConnectionStatus; any
        failure while building the clients raises.

        Raises:
            KafkaConnectionException: establishing the connection failed.
        """
        try:
            # Pre-flight check: do not build long-lived clients against a dead cluster.
            status = self.test_connection(config)
            if not status.connected:
                return status

            kafka_config = self._get_kafka_config(config)

            # Producer tuned for durability (acks='all') with modest batching.
            self.producer = KafkaProducer(
                **kafka_config,
                value_serializer=lambda v: json.dumps(v).encode('utf-8') if isinstance(v, dict) else str(v).encode('utf-8'),
                key_serializer=lambda k: str(k).encode('utf-8') if k is not None else None,
                acks='all',
                retries=3,
                batch_size=16384,
                linger_ms=10,
                buffer_memory=33554432,
            )

            self.admin_client = KafkaAdminClient(**kafka_config)

            # Record connection state only after both clients were built.
            self.connection_config = config
            self.connected = True
            self.connected_at = datetime.now()
            self.last_error = None

            log_manager.log_operation(
                operation="CONNECT",
                message=f"成功连接到Kafka集群: {config.bootstrap_servers}",
                success=True,
                details={"bootstrap_servers": config.bootstrap_servers}
            )

            return ConnectionStatus(
                connected=True,
                bootstrap_servers=config.bootstrap_servers,
                connected_at=self.connected_at
            )

        except Exception as e:
            self.last_error = str(e)
            log_manager.log_operation(
                operation="CONNECT",
                message=f"连接Kafka集群失败: {str(e)}",
                success=False,
                details={"bootstrap_servers": config.bootstrap_servers, "error": str(e)}
            )
            raise KafkaConnectionException(f"连接失败: {str(e)}", config.bootstrap_servers)

    def disconnect(self):
        """Close both clients and reset connection state.

        Best effort: errors during shutdown are logged, never raised.
        """
        try:
            if self.producer:
                self.producer.close()
                self.producer = None

            if self.admin_client:
                self.admin_client.close()
                self.admin_client = None

            # Capture the server list for the log line before clearing state.
            bootstrap_servers = self.connection_config.bootstrap_servers if self.connection_config else "unknown"

            self.connected = False
            self.connected_at = None
            self.connection_config = None
            self.last_error = None

            log_manager.log_operation(
                operation="DISCONNECT",
                message=f"已断开Kafka连接: {bootstrap_servers}",
                success=True
            )

        except Exception as e:
            log_manager.log_operation(
                operation="DISCONNECT",
                message=f"断开连接时发生错误: {str(e)}",
                success=False,
                details={"error": str(e)}
            )

    def get_connection_status(self) -> ConnectionStatus:
        """Return a snapshot of the current connection state."""
        return ConnectionStatus(
            connected=self.connected,
            bootstrap_servers=self.connection_config.bootstrap_servers if self.connection_config else None,
            connected_at=self.connected_at,
            error=self.last_error
        )

    def _list_topic_names(self) -> List[str]:
        """Return non-internal topic names.

        Tolerates the different return shapes ``list_topics()`` has had
        across kafka-python versions (dict-of-topics, plain dict, or a
        plain list of names in current releases).
        """
        topics_metadata = self.admin_client.list_topics()

        if isinstance(topics_metadata, dict) and 'topics' in topics_metadata:
            return [name for name in topics_metadata['topics'].keys() if not name.startswith('__')]
        if isinstance(topics_metadata, dict):
            return [name for name in topics_metadata.keys() if not name.startswith('__')]

        try:
            # Last resort: peek at the cached cluster metadata.
            cluster_metadata = self.admin_client._get_cluster_metadata()
            return [topic for topic in cluster_metadata.topics if not topic.startswith('__')]
        except Exception:
            # Modern kafka-python simply returns a list of name strings.
            return [topic for topic in topics_metadata if not topic.startswith('__')]

    def _fetch_topic_configs(self, topic_names: List[str]) -> Dict[str, Dict[str, Any]]:
        """Best-effort per-topic config lookup; failures are logged, not raised."""
        configs: Dict[str, Dict[str, Any]] = {}
        for topic_name in topic_names:
            try:
                config_resource = ConfigResource(ConfigResourceType.TOPIC, topic_name)
                config_results = self.admin_client.describe_configs([config_resource])

                # Result values differ by version: either an object exposing
                # .configs or something directly convertible to a dict.
                for resource, config in config_results.items():
                    if hasattr(config, 'configs'):
                        configs[resource.name] = dict(config.configs)
                    else:
                        configs[resource.name] = dict(config)
            except Exception as e:
                log_manager.log_operation(
                    operation="GET_TOPIC_CONFIG",
                    message=f"获取Topic {topic_name} 配置失败: {str(e)}",
                    success=False,
                    details={"topic": topic_name, "error": str(e)}
                )
        return configs

    def get_topics(self) -> List[TopicInfo]:
        """Return every non-internal topic with partition count, replication
        factor and (best effort) per-topic configuration, sorted by name.

        Raises:
            KafkaConnectionException: not connected.
            KafkaTopicException: listing topics failed.
        """
        if not self.connected or not self.admin_client:
            raise KafkaConnectionException("未连接到Kafka集群")

        try:
            topic_names = self._list_topic_names()
            if not topic_names:
                log_manager.log_topic_list(0, True)
                return []

            configs = self._fetch_topic_configs(topic_names)

            # A short-lived consumer gives access to partition metadata.
            # Reuse the shared client config so the security settings and
            # api_version stay consistent with the producer/admin clients
            # (previously the consumer hard-coded a different api_version).
            consumer_config = self._get_kafka_config(self.connection_config)
            consumer_config.update({
                'group_id': None,
                'enable_auto_commit': False,
            })
            consumer = KafkaConsumer(**consumer_config)

            topics: List[TopicInfo] = []
            try:
                for topic_name in topic_names:
                    try:
                        partitions = consumer.partitions_for_topic(topic_name)
                        if not partitions:
                            continue

                        # Replication factor = replica count of any one partition.
                        replication_factor = 0
                        try:
                            partition_info = consumer._client.cluster.partitions_for_topic(topic_name)
                            if partition_info:
                                first_partition = next(iter(partition_info.values()))
                                replication_factor = len(first_partition.replicas)
                        except Exception:
                            # Internal metadata shape differs by version; keep 0.
                            pass

                        topics.append(TopicInfo(
                            name=topic_name,
                            partition_count=len(partitions),
                            replication_factor=replication_factor,
                            configs=configs.get(topic_name, {})
                        ))

                    except Exception as e:
                        log_manager.log_operation(
                            operation="GET_TOPIC_INFO",
                            message=f"获取Topic {topic_name} 详细信息失败: {str(e)}",
                            success=False,
                            details={"topic": topic_name, "error": str(e)}
                        )
            finally:
                consumer.close(autocommit=False)

            topics.sort(key=lambda t: t.name)

            log_manager.log_topic_list(len(topics), True)
            return topics

        except Exception as e:
            error_msg = f"获取Topic列表失败: {str(e)}"
            log_manager.log_topic_list(0, False, error_msg)
            raise KafkaTopicException(error_msg)

    def get_topic_partitions(self, topic_name: str) -> List[PartitionInfo]:
        """Return leader / replica / ISR details for each partition of a topic.

        Raises:
            KafkaConnectionException: not connected.
            KafkaTopicException: topic does not exist or metadata lookup failed.
        """
        if not self.connected or not self.admin_client:
            raise KafkaConnectionException("未连接到Kafka集群")

        try:
            # Fixed: list_topics() returns plain topic names, not an object
            # with a .topics attribute — the old attribute access always
            # failed and turned every call into a generic error.
            if topic_name not in set(self.admin_client.list_topics()):
                raise KafkaTopicException(f"Topic '{topic_name}' 不存在", topic_name)

            # describe_topics() returns a list of dicts carrying the
            # per-partition leader/replicas/isr metadata we need.
            partitions: List[PartitionInfo] = []
            for topic_meta in self.admin_client.describe_topics([topic_name]):
                if topic_meta.get('topic') != topic_name:
                    continue
                for pm in topic_meta.get('partitions', []):
                    partitions.append(PartitionInfo(
                        partition_id=pm['partition'],
                        leader=pm['leader'],
                        replicas=list(pm['replicas']),
                        isr=list(pm['isr'])
                    ))

            log_manager.log_operation(
                operation="GET_PARTITIONS",
                message=f"获取Topic分区信息成功: {topic_name}",
                success=True,
                details={"topic": topic_name, "partition_count": len(partitions)}
            )

            return partitions

        except KafkaTopicException:
            raise
        except Exception as e:
            error_msg = f"获取Topic分区信息失败: {str(e)}"
            log_manager.log_operation(
                operation="GET_PARTITIONS",
                message=error_msg,
                success=False,
                details={"topic": topic_name, "error": str(e)}
            )
            raise KafkaTopicException(error_msg, topic_name)

    def send_message(self, request: MessageRequest) -> MessageResponse:
        """Send a single message and wait (up to 30s) for broker acknowledgement.

        The value is sent as parsed JSON when it is a valid JSON document,
        otherwise as the raw string (the producer's serializer handles both).

        Raises:
            KafkaConnectionException: not connected.
            KafkaMessageException: the send timed out or failed.
        """
        if not self.connected or not self.producer:
            raise KafkaConnectionException("未连接到Kafka集群")

        try:
            # Prefer structured JSON when the payload parses as such.
            try:
                message_value = json.loads(request.value)
            except json.JSONDecodeError:
                message_value = request.value

            # Header values must be bytes on the wire.
            headers = []
            if request.headers:
                headers = [(k, v.encode('utf-8')) for k, v in request.headers.items()]

            future = self.producer.send(
                topic=request.topic,
                value=message_value,
                key=request.key,
                partition=request.partition,
                headers=headers
            )

            # Block until the broker acknowledges (acks='all').
            record_metadata = future.get(timeout=30)
            self.producer.flush()

            response = MessageResponse(
                success=True,
                partition=record_metadata.partition,
                offset=record_metadata.offset,
                # Broker timestamps are epoch milliseconds.
                timestamp=datetime.fromtimestamp(record_metadata.timestamp / 1000.0)
            )

            log_manager.log_message_send(
                topic=request.topic,
                success=True,
                partition=record_metadata.partition,
                offset=record_metadata.offset
            )

            return response

        except KafkaTimeoutError:
            error_msg = "消息发送超时"
            log_manager.log_message_send(request.topic, False, error=error_msg)
            raise KafkaMessageException(error_msg, request.topic, request.partition)
        except Exception as e:
            error_msg = f"消息发送失败: {str(e)}"
            log_manager.log_message_send(request.topic, False, error=error_msg)
            raise KafkaMessageException(error_msg, request.topic, request.partition)

    def send_batch_messages(self, topic: str, messages: List[Dict[str, Any]]) -> List[MessageResponse]:
        """Send a list of messages one by one, collecting per-message results.

        A failed message yields a MessageResponse with success=False instead
        of aborting the rest of the batch.

        Raises:
            KafkaConnectionException: not connected.
        """
        if not self.connected or not self.producer:
            raise KafkaConnectionException("未连接到Kafka集群")

        responses: List[MessageResponse] = []

        for message_data in messages:
            try:
                request = MessageRequest(
                    topic=topic,
                    value=message_data.get('value', ''),
                    key=message_data.get('key'),
                    partition=message_data.get('partition'),
                    headers=message_data.get('headers')
                )
                responses.append(self.send_message(request))

            except Exception as e:
                # Record the failure but keep sending the remaining messages.
                responses.append(MessageResponse(
                    success=False,
                    error=str(e)
                ))

        success_count = sum(1 for r in responses if r.success)
        log_manager.log_operation(
            operation="SEND_BATCH",
            message=f"批量发送消息完成: {success_count}/{len(messages)} 成功",
            success=success_count > 0,
            details={"topic": topic, "total": len(messages), "success": success_count}
        )

        return responses

# Module-level singleton KafkaService instance shared by the application
kafka_service = KafkaService()