package com.atguigu.kafka.config;

import com.atguigu.kafka.interceptor.CountInterCeptor;
import com.atguigu.kafka.interceptor.TimeInterceptor;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;



/**
 * Kafka producer component that sends plain and transactional messages to the
 * {@code "hainiu"} topic via Spring's {@link KafkaTemplate}.
 *
 * <p>NOTE(review): this class declares no {@code @Bean} methods, so
 * {@code @Component}/{@code @Service} would describe its role more accurately
 * than {@code @Configuration}; the annotation is left unchanged to avoid
 * altering bean wiring.
 */
@Configuration
public class KafkaProducer {

    /** Target topic for every message sent by this component. */
    private static final String TOPIC = "hainiu";

    /** Kafka bootstrap servers (host:port list) from application properties. */
    @Value("${spring.kafka.bootstrap-servers}")
    private String server;

    /** Fully-qualified key serializer class name from application properties. */
    @Value("${spring.kafka.producer.key-serializer}")
    private String keySerializerClass;

    /** Fully-qualified value serializer class name from application properties. */
    @Value("${spring.kafka.producer.value-serializer}")
    private String valueSerializerClass;

    // required = false lets the application context start even when no
    // KafkaTemplate bean is configured (e.g. in a test profile).
    @Autowired(required = false)
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends {@code message} to partition 1 of {@value #TOPIC} with key
     * {@code "my"}, logs the outcome through an asynchronous callback, then
     * blocks on the future so the overall call completes synchronously.
     *
     * @param message payload to send
     * @throws Exception if the send fails or the calling thread is
     *                   interrupted while waiting for the broker acknowledgement
     */
    public void sendAdminMessage(String message) throws Exception {

        // Partition 1 is pinned explicitly; if no partition were given,
        // Kafka would derive one by hashing the record key ("my").
        ListenableFuture<SendResult<String, String>> future =
                kafkaTemplate.send(TOPIC, 1, "my", message);

        // Register success/failure logging; runs asynchronously when the
        // broker responds.
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(SendResult<String, String> result) {
                // Fixed log format: balanced brackets and spacing
                // (was "...offset=[5]with partition=3]").
                System.out.println("Sent message=[" + message + "] to topic=[" + TOPIC
                        + "] with offset=[" + result.getRecordMetadata().offset()
                        + "] with partition=[" + result.getRecordMetadata().partition() + "]");
            }

            @Override
            public void onFailure(Throwable ex) {
                // Print the throwable itself (class + message), not just
                // getMessage(), so the failure cause is not lost.
                System.err.println("Unable to send message=[" + message + "] due to : " + ex);
            }
        });
        System.out.println("发送消息。。。");

        // Block until the broker acknowledges, making the send synchronous.
        future.get();
    }

    /**
     * Sends {@code message} to {@value #TOPIC} inside a local Kafka
     * transaction. Requires the producer factory to be transaction-enabled
     * (i.e. {@code spring.kafka.producer.transaction-id-prefix} is set);
     * otherwise {@code executeInTransaction} throws at runtime.
     *
     * @param message payload to send
     */
    public void sendTransactionMessage(String message) {
        // Throwing from the lambda (e.g. new RuntimeException("fail"))
        // would abort the transaction; returning the send future commits it.
        kafkaTemplate.executeInTransaction(operations -> operations.send(TOPIC, message));
    }

}
