package com.iot.service.mqtt.bridge.kafka;

import com.iot.service.mqtt.bridge.AbstractBridgeObserver;
import com.iot.service.mqtt.bridge.TlSubject;
import com.iot.service.mqtt.model.request.TlMqttPublishReq;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;

/**
 * @Author: hszhou
 * @Date: 2025/1/21 15:49
 * @Description: kafka桥接数据
 */
@Slf4j
public class KafkaBridgeObserver extends AbstractBridgeObserver {

    protected TlSubject subject;

    /**
     * Producer cache keyed by the Kafka bootstrap-servers string, so that all
     * forwards targeting the same cluster reuse a single {@link KafkaProducer}.
     * ConcurrentHashMap because {@code forward} may be invoked concurrently.
     */
    private final ConcurrentHashMap<String, Producer<String, String>> map = new ConcurrentHashMap<>();

    /**
     * Registers this observer with the given subject so it receives MQTT
     * publish events to bridge into Kafka.
     *
     * @param subject the subject to observe; must not be null
     */
    public KafkaBridgeObserver(TlSubject subject) {
        this.subject = subject;
        this.subject.attach(this);
    }

    /**
     * Forwards an MQTT publish request to every configured Kafka destination.
     * A producer per bootstrap-servers address is lazily created and cached;
     * each message is sent synchronously so failures surface immediately.
     *
     * @param clientId the MQTT client id that published the message
     * @param qos      the MQTT QoS level of the publish
     * @param req      the publish request being bridged
     * @throws RuntimeException if the synchronous send fails or is interrupted
     */
    @Override
    protected void forward(String clientId, int qos, TlMqttPublishReq req) {
        List<TlKafkaInfo> entityInfos = getEntityInfo();
        for (TlKafkaInfo entityInfo : entityInfos) {
            // Use the lambda's own key parameter rather than capturing entityInfo,
            // so the cached producer always matches the map key it is stored under.
            Producer<String, String> producer = map.computeIfAbsent(entityInfo.getBootstrapServers(), k -> {
                Properties props = new Properties();
                props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, k);
                props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
                props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
                // Reliability settings: wait for all in-sync replicas, retry
                // transient failures, and enable idempotence to avoid duplicates.
                props.put(ProducerConfig.ACKS_CONFIG, "all");
                props.put(ProducerConfig.RETRIES_CONFIG, 3);
                props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
                return new KafkaProducer<>(props);
            });

            // Build the record: key is currently a fixed placeholder, value is
            // the request's string form.
            ProducerRecord<String, String> record = new ProducerRecord<>(
                    entityInfo.getTopic(),
                    "message-key",
                    req.toString()
            );
            RecordMetadata metadata;
            try {
                // Synchronous send: block until the broker acknowledges.
                metadata = producer.send(record).get();
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers up the stack can
                // observe the interruption.
                Thread.currentThread().interrupt();
                throw new RuntimeException("Interrupted while sending to Kafka topic " + entityInfo.getTopic(), e);
            } catch (ExecutionException e) {
                throw new RuntimeException("Failed to send to Kafka topic " + entityInfo.getTopic(), e);
            }
            // Use the class's SLF4J logger instead of System.out for consistency
            // with the rest of the service's logging.
            log.info("Kafka sync send succeeded -> topic:{} partition:{} offset:{}",
                    metadata.topic(), metadata.partition(), metadata.offset());
        }
    }

    /**
     * Closes and discards all cached producers. Call during service shutdown
     * to flush buffered records and release network resources.
     **/
    public void close() {
        map.forEach((servers, producer) -> {
            try {
                producer.close();
            } catch (RuntimeException e) {
                log.warn("Failed to close Kafka producer for {}", servers, e);
            }
        });
        map.clear();
    }

    /**
     * Returns the Kafka destinations to bridge to. In production this should
     * come from user configuration; the values below are hard-coded test data.
     * NOTE(review): the two entries are identical, so each message is currently
     * sent twice to the same topic — confirm whether that is intentional.
     *
     * @author hszhou
     * @datetime: 2025-05-12 18:40:16
     * @return List<TlKafkaInfo> the configured Kafka destinations
     **/
    private List<TlKafkaInfo> getEntityInfo() {
        List<TlKafkaInfo> entityInfos = new ArrayList<>();
        entityInfos.add(new TlKafkaInfo("ws", "172.28.33.102:9092", "org.apache.kafka.common.serialization.StringSerializer", "org.apache.kafka.common.serialization.StringSerializer"));
        entityInfos.add(new TlKafkaInfo("ws", "172.28.33.102:9092", "org.apache.kafka.common.serialization.StringSerializer", "org.apache.kafka.common.serialization.StringSerializer"));
        return entityInfos;
    }
}
