package com.example.kafka2.util;

import com.example.kafka2.model.Product;
import com.example.kafka2.model.ProductSource;
import com.google.gson.Gson;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

public class KafkaUtils {

    /**
     * Shared producer instance, built once in the static initializer.
     * KafkaProducer is thread-safe, so a single instance can be reused by
     * all callers instead of constructing a new one per message.
     */
    public static KafkaProducer<Integer, String> producer;

    static {
        Map<String, Object> configs = new HashMap<>();
        // Initial bootstrap broker list; in a cluster, the remaining brokers
        // are discovered through this initial connection.
        configs.put("bootstrap.servers", "10.201.0.123:9092,10.201.0.124:9092,10.201.0.125:9092");
        // "1" = leader acknowledgement only (no wait for full ISR replication).
        configs.put("acks", "1");
        configs.put("batch.size", 16384);
        configs.put("linger.ms", 1);
        configs.put("buffer.memory", 33554432);
        // Serializer for the record key (Integer).
        configs.put("key.serializer", IntegerSerializer.class);
        // Serializer for the record value (String).
        configs.put("value.serializer", StringSerializer.class);

        // Create the shared KafkaProducer.
        producer = new KafkaProducer<>(configs);
    }

    /**
     * Sends a message synchronously and returns the broker-assigned metadata.
     *
     * <p>Fixes over the previous version: reuses the shared static
     * {@link #producer} instead of building (and leaking, on exception, since
     * close() was not in a finally) a brand-new producer with duplicated
     * configuration on every call, and actually attaches the custom header
     * that was previously constructed but never added to the record.
     *
     * @param topic topic name to publish to
     * @param msg   message payload (record value)
     * @return metadata (topic/partition/offset) of the acknowledged record
     * @throws ExecutionException   if the send fails on the broker side
     * @throws InterruptedException if the calling thread is interrupted while waiting
     */
    public static RecordMetadata sendmsg(String topic, String msg) throws ExecutionException, InterruptedException {
        // Custom application-defined message header.
        List<Header> headers = new ArrayList<>();
        headers.add(new RecordHeader("biz.name", "producer.demo".getBytes(StandardCharsets.UTF_8)));

        // partition, timestamp and key are null so Kafka chooses the partition;
        // the headers list is passed through to the record.
        ProducerRecord<Integer, String> record = new ProducerRecord<>(
                topic, // topic name
                null,  // partition: let Kafka decide
                null,  // timestamp: broker/client default
                null,  // key: none
                msg,   // string payload as value
                headers
        );

        // Block until the broker acknowledges the send.
        Future<RecordMetadata> future = producer.send(record);
        return future.get();
    }

    /**
     * Sends a message asynchronously on the shared producer; the callback
     * logs success (topic/partition/offset) or the failure message.
     *
     * @param topic topic name to publish to
     * @param msg   message payload (record value)
     * @param incr  sequence number of the generated record, echoed in the log
     * @throws ExecutionException   declared for interface compatibility (not thrown here)
     * @throws InterruptedException declared for interface compatibility (not thrown here)
     */
    public static void sendybmsg(String topic, String msg, int incr) throws ExecutionException, InterruptedException {
        // Wrap the payload; no key, so Kafka picks the partition.
        ProducerRecord<Integer, String> record = new ProducerRecord<>(
                topic, // topic name
                msg    // string payload as value
        );

        // Asynchronous send with confirmation callback.
        producer.send(record, new Callback() {
            @Override
            public void onCompletion(RecordMetadata metadata, Exception e) {
                if (e == null) {
                    // Typo fix: "roductNo" -> "productNo".
                    System.out.println("当前生成数据productNo是：" + incr
                            + "成功发送product消息，主题：" + metadata.topic()
                            + "消息的分区：" + metadata.partition()
                            + "消息的偏移量：" + metadata.offset());
                } else {
                    System.out.println("异常消息:" + e.getMessage());
                }
            }
        });
    }
}
