package cn.dglydrpy.study.j2ee.kafka.producer;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.HashMap;
import java.util.Map;
/**
 * Demonstrates Kafka producer interceptors. The interceptor chain is NOT a
 * stack: if interceptors are registered in the order One -> Two -> Three,
 * both the send path ({@code onSend}) and the acknowledgement path
 * ({@code onAcknowledgement}) invoke them in that same One -> Two -> Three
 * order.
 *
 * @author LY_飘涯
 */
public class MyKafkaInterceptorProducer {
    public static void main(String[] args) {

        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.80.131:9092");
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // interceptor.classes: to register multiple interceptors, list their
        // fully qualified class names separated by commas; they are invoked
        // in this order.
        configs.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG,
                "cn.dglydrpy.study.j2ee.kafka.interceptor.ProducerInterceptorOne," +
                "cn.dglydrpy.study.j2ee.kafka.interceptor.ProducerInterceptorTwo," +
                "cn.dglydrpy.study.j2ee.kafka.interceptor.ProducerInterceptorThree");

        // Custom entry passed through the config map; presumably the
        // interceptors read it in their configure(Map) callback — confirm
        // against the interceptor classes.
        configs.put("classContent", "this is lagou's kafka class");

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) even if send() throws.
        try (KafkaProducer<Integer, String> producer = new KafkaProducer<>(configs)) {

            // The topic "tp_inter" must be created before running this demo.
            ProducerRecord<Integer, String> record = new ProducerRecord<>(
                    "tp_inter",
                    0,      // partition
                    1001,   // key
                    "this is lagou's 1001 message"
            );

            // Asynchronous send; the callback runs when the broker acks (or
            // the send fails).
            producer.send(record, new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception == null) {
                        System.out.println(metadata.offset());
                    } else {
                        // Report failures instead of silently swallowing them
                        // (acceptable in a demo; a real app would log via SLF4J).
                        exception.printStackTrace();
                    }
                }
            });
        }
    }
}
