package com.hyq.utils;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.admin.TopicListing;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.TopicPartitionInfo;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;

import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

/**
 * @author hanyq
 * @date 2024/6/18 17:06
 */
@Component
public class KafkaUtils {

    /** Kafka bootstrap servers, injected from {@code spring.kafka.bootstrap-servers}. */
    @Value("${spring.kafka.bootstrap-servers}")
    private String url;

    // Typed template instead of the raw type. The value serializer configured below is
    // StringSerializer, so values are ultimately written as strings on the wire.
    @Resource
    private KafkaTemplate<String, Object> kafkaTemplate;

    @Resource
    private AdminClient adminClient;

    /**
     * Lists the names of all topics visible to the admin client.
     *
     * @return topic names; an empty (mutable) list if the lookup fails
     */
    public List<String> getTopicList() {
        try {
            return adminClient.listTopics().listings().get().stream()
                    .map(TopicListing::name)
                    .collect(Collectors.toList());
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return new ArrayList<>();
    }

    /**
     * Creates the given topics. The request is issued asynchronously; this method does not
     * wait for completion, matching the original fire-and-forget behavior.
     *
     * @param newTopics topic definitions to create
     */
    public void createTopic(Collection<NewTopic> newTopics) {
        // The CreateTopicsResult was previously assigned to an unused local; just issue the call.
        adminClient.createTopics(newTopics);
    }

    /**
     * Deletes the given topics (asynchronous, fire-and-forget).
     *
     * @param deleteTopics names of the topics to delete
     */
    public void deleteTopic(Collection<String> deleteTopics) {
        adminClient.deleteTopics(deleteTopics);
    }

    /**
     * Describes the given topics and renders one line per partition.
     *
     * @param topics topic names to describe
     * @return newline-separated partition descriptions; empty string if the lookup fails
     */
    public String getTopicInfo(Collection<String> topics) {
        // StringBuilder instead of AtomicReference + string concatenation: same output,
        // linear instead of quadratic, and no pretense of atomicity that wasn't needed.
        StringBuilder info = new StringBuilder();
        try {
            adminClient.describeTopics(topics).all().get().forEach((topic, description) -> {
                for (TopicPartitionInfo partition : description.partitions()) {
                    info.append(partition).append("\n");
                }
            });
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return info.toString();
    }

    /**
     * Sends a message to the given topic and logs the outcome via a producer callback.
     *
     * @param topic   destination topic
     * @param message payload to send
     */
    public void sendMessageToTopic(String topic, Object message) {
        ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send(topic, message);
        // Producer callback: typed generics remove the unchecked SendResult cast.
        future.addCallback(sendResult -> {
            // Success
            System.out.println("=========================start");
            System.out.println(sendResult.getProducerRecord().value());
            System.out.println("=========================end");
        }, failure -> {
            // Failure
            System.out.println("-----------------------start");
            System.out.println(failure.getMessage());
            System.out.println("-----------------------end");
        });
    }

    /**
     * Producer configuration exposed as a bean.
     *
     * @return producer property map keyed by {@link ProducerConfig} constants
     */
    @Bean
    public Map<String, Object> kafkaProducer() {
        Map<String, Object> properties = new HashMap<>();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, url);
        // Acknowledgement mode: wait for the full ISR before considering a send complete.
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        // NOTE(review): retries=0 alongside acks=all means a transient broker error fails the
        // send immediately — confirm this is intentional before raising it.
        properties.put(ProducerConfig.RETRIES_CONFIG, "0");
        // Batch size in bytes.
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384");
        // How long (ms) to wait before flushing a batch.
        properties.put(ProducerConfig.LINGER_MS_CONFIG, "1");
        // Total producer buffer memory in bytes.
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432");
        // Key serialization.
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        // Value serialization.
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return properties;
    }
}
