from kafka import KafkaProducer
from kafka.errors import NoBrokersAvailable, KafkaTimeoutError
import json
import time
import os
import logging

# Configure module-level logging; INFO so connection-retry progress is visible.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def get_kafka_broker():
    """Return the Kafka bootstrap address from the KAFKA_BROKER env var, or the in-cluster default."""
    default_broker = 'kafka-0.kafka.kafka-practise.svc.cluster.local:9092'
    return os.environ.get('KAFKA_BROKER', default_broker)

def create_producer_with_retry(max_retries=10, retry_interval=10):
    """Create a KafkaProducer, retrying the connection on failure.

    Args:
        max_retries: maximum number of connection attempts (must be >= 1).
        retry_interval: seconds to sleep between failed attempts.

    Returns:
        A connected KafkaProducer instance.

    Raises:
        ValueError: if max_retries is less than 1.
        Exception: the last connection error once all attempts are exhausted
            (e.g. kafka.errors.NoBrokersAvailable).
    """
    # Guard: the original loop silently returned None when max_retries <= 0,
    # which surfaced later as a confusing AttributeError at the caller.
    if max_retries < 1:
        raise ValueError("max_retries must be >= 1")

    for attempt in range(max_retries):
        try:
            producer = KafkaProducer(
                bootstrap_servers=[get_kafka_broker()],
                value_serializer=lambda v: json.dumps(v).encode('utf-8'),
                key_serializer=lambda k: k.encode('utf-8') if k else None,
                request_timeout_ms=30000,
                retries=5,
                api_version=(2, 6, 0)  # pin the API version to skip broker version probing
            )

            # Probe topic metadata purely for logging; a failure here is not fatal
            # because the broker connection itself already succeeded.
            try:
                partitions = producer.partitions_for('test-topic')
                logger.info(f"成功连接到 Kafka Broker: {get_kafka_broker()}")
                if partitions:
                    logger.info(f"主题 'test-topic' 的分区: {partitions}")
                else:
                    logger.info("主题 'test-topic' 不存在，将自动创建")
            except Exception as e:
                logger.info(f"连接到 Kafka 成功，但主题检查时出现预期内错误: {e}")

            return producer

        except Exception as e:
            # Single handler for both NoBrokersAvailable and any other startup
            # error; only the log message differs between the two cases.
            if isinstance(e, NoBrokersAvailable):
                logger.warning(f"尝试 {attempt + 1}/{max_retries}: 无法连接到 Kafka, {retry_interval} 秒后重试...")
            else:
                logger.warning(f"尝试 {attempt + 1}/{max_retries}: 连接异常, {retry_interval} 秒后重试... 错误: {e}")
            if attempt == max_retries - 1:
                raise  # bare raise preserves the original traceback
            time.sleep(retry_interval)

def main():
    """Run the producer loop: connect with retries, then send a test message every 5 s.

    Sends JSON messages to 'test-topic' with a rotating key (key-0..key-9) until
    interrupted. The producer is always closed in the finally block.
    """
    logger.info("启动 Kafka Producer...")

    # Initialize to None so the finally block can test it safely; the original
    # `'producer' in locals()` check was fragile (breaks under renames/refactors).
    producer = None
    try:
        producer = create_producer_with_retry()

        topic = 'test-topic'
        message_count = 0

        logger.info("开始发送消息...")

        while True:
            message = {
                'id': message_count,
                'timestamp': time.time(),
                'message': f'测试消息 #{message_count}',
                'source': 'kafka-producer',
                'cluster_mode': 'KRAFT'
            }

            # Key stays a str — the producer's key_serializer handles encoding.
            key_str = f'key-{message_count % 10}'

            try:
                # Pass the key as a plain string; key_serializer encodes it.
                future = producer.send(
                    topic,
                    value=message,
                    key=key_str
                )

                # Block until the broker acknowledges the send (or times out).
                future.get(timeout=10)

                logger.info(f"成功发送消息 #{message_count}: key={key_str}, value={message['message']}")
                message_count += 1
                time.sleep(5)  # one message every 5 seconds

            except KafkaTimeoutError as e:
                logger.error(f"发送消息超时: {e}")
                time.sleep(5)
            except Exception as e:
                logger.error(f"发送消息失败: {e}")
                # Extra debug context for unexpected serialization/broker errors.
                logger.error(f"Key 类型: {type(key_str)}, Key 值: {key_str}")
                logger.error(f"Value 类型: {type(message)}, Value 值: {message}")
                time.sleep(5)

    except KeyboardInterrupt:
        logger.info("\n停止生产者...")
    except Exception as e:
        logger.error(f"Producer 启动失败: {e}")
    finally:
        if producer is not None:
            producer.close()
            logger.info("Producer 已关闭")

# Script entry point.
if __name__ == "__main__":
    main()
