package com.xunkids.bos.migration.component.transmission.kafka;

import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.xunkids.bos.migration.common.exception.RushException;
import com.xunkids.bos.migration.component.log.LogService;
import com.xunkids.bos.migration.component.log.MigrationLog;
import com.xunkids.bos.migration.component.log.impl.SysLog;
import com.xunkids.bos.migration.component.transmission.pojo.TransportMessage;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;

/**
 * 默认 kafka 发布器
 *
 * @author xigexb
 * @version 1.0.0
 * @since 2022/1/6 21:41
 */
/**
 * Default Kafka publisher.
 *
 * <p>Maintains a small pool of {@link KafkaProducer} instances and picks one at
 * random for each send to spread load across client connections.
 *
 * @author xigexb
 * @version 1.0.0
 * @since 2022/1/6 21:41
 */
public class DefaultKafkaPublisher implements KafkaPublisher {

    /**
     * Logger for this publisher.
     */
    private final Logger logger;

    /**
     * Effective producer configuration: caller-supplied keys (with '-' normalized
     * to '.') merged over the project defaults.
     */
    private final Map<String, Object> config;

    /**
     * Pool of Kafka producers; one is chosen at random per send.
     */
    private final List<Producer<String, String>> kafkaProducers;

    /**
     * Pool size: half the available processors, but at least one.
     * Fixes the original expression, which guarded on {@code procs / 2 == 0} yet
     * fell back to the full processor count, contradicting the guard's intent.
     */
    private final int clientNum = Math.max(1, Runtime.getRuntime().availableProcessors() / 2);

    /**
     * Builds the publisher: normalizes the supplied configuration keys
     * ('-' becomes '.'), merges them over the default producer settings, and
     * creates the producer pool.
     *
     * @param config     raw producer configuration; keys may use '-' separators
     * @param logService log service (currently unused; kept for signature compatibility)
     */
    public DefaultKafkaPublisher(Map<String, Object> config, LogService logService) {
        this.logger = LoggerFactory.getLogger(getClass());
        Map<String, Object> normalized = new HashMap<>();
        config.forEach((key, value) -> normalized.put(key.replace("-", "."), value));
        this.config = KafkaConfigTool.mergeMap(KafkaConfigTool.producers_default_config, normalized);
        this.kafkaProducers = makeClient();
    }

    /**
     * Publishes a message to the given topic via a randomly chosen producer.
     * Asynchronous send failures are logged by the completion callback; failures
     * raised synchronously are logged and rethrown as {@link RushException}.
     *
     * @param topic topic name
     * @param str   message payload
     * @throws RushException if the send call itself fails
     */
    @Override
    public void send(String topic, String str) throws RushException {
        String msgId = IdUtil.nanoId(32);
        ProducerRecord<String, String> kafkaMessage = createKafkaMessage(msgId, topic, str, null);
        try {
            // nextInt(clientNum) already yields [0, clientNum); the former "% clientNum" was redundant.
            kafkaProducers.get(ThreadLocalRandom.current().nextInt(clientNum)).send(kafkaMessage, (meta, e) -> {
                if (e != null) {
                    logger.error("发送失败", e);
                }
            });
        } catch (Exception e) {
            // Log through SLF4J (stack trace included) instead of printStackTrace().
            logger.error("发送失败", e);
            throw new RushException(e.getMessage());
        }
        if (logger.isInfoEnabled()) {
            logger.info("默认Kafka发布器发布主题[{}]数据:{}", topic, str);
        }
    }

    /**
     * Builds a Kafka {@link ProducerRecord}. String payloads are sent as-is;
     * any other payload is serialized to JSON.
     *
     * @param messageId message id, used as the record key
     * @param topic     topic name
     * @param data      payload (String, or a JSON-serializable object)
     * @param headers   record headers (currently ignored — header support is not implemented)
     * @return the record to send
     */
    public ProducerRecord<String, String> createKafkaMessage(String messageId, String topic, Serializable data, Map<String, Object> headers) {
        String value = data instanceof String ? (String) data : JSON.toJSONString(data);
        return new ProducerRecord<>(topic, messageId, value);
    }

    /**
     * Publishes a transport message, serialized to JSON, to the given topic.
     *
     * @param topic   topic name
     * @param message transport message
     * @throws RushException if the send fails
     */
    @Override
    public void send(String topic, TransportMessage<Map<String, Object>> message) throws RushException {
        send(topic, JSON.toJSONString(message));
    }

    /**
     * Creates {@code clientNum} Kafka producers, each with its own copy of the
     * merged configuration.
     *
     * @return the producer pool
     */
    public List<Producer<String, String>> makeClient() {
        List<Producer<String, String>> clients = new ArrayList<>(clientNum);
        for (int i = 0; i < clientNum; i++) {
            // Each producer gets an independent copy of the config map.
            clients.add(new KafkaProducer<>(new HashMap<>(this.config)));
        }
        if (logger.isInfoEnabled()) {
            logger.info("make kafka pub {}", clients.size());
        }
        return clients;
    }

}
