package cn.bone.kafka.kafka01;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

public class CustomProducer {

    /** Bootstrap broker list for the demo cluster. */
    private static final String BOOTSTRAP_SERVERS =
            "192.168.1.5:3001,192.168.1.5:3002,192.168.1.5:3003";

    /** Topic all three demos publish to. */
    private static final String TOPIC = "first";

    public static void main(String[] args) {
        demo03();
    }

    /**
     * Builds the shared producer configuration: bootstrap servers plus
     * String serializers for both key and value. Centralized here so the
     * three demos stay consistent.
     *
     * @return a fresh {@link Properties} instance for {@link KafkaProducer}
     */
    private static Properties producerProps() {
        Properties properties = new Properties();
        // Connect to the cluster (bootstrap.servers).
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        // Key/value serializers (key.serializer / value.serializer).
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return properties;
    }

    /**
     * Plain asynchronous send (fire-and-forget): no delivery feedback.
     */
    public static void demo01() {
        // try-with-resources guarantees the producer is closed — and its
        // buffered records flushed — even if send() throws.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(producerProps())) {
            for (int i = 0; i < 10; i++) {
                kafkaProducer.send(new ProducerRecord<>(TOPIC, "hello" + i));
            }
        }
    }

    /**
     * Asynchronous send with a completion callback. On success the broker
     * metadata (topic/partition) is printed; on failure the delivery
     * exception is reported instead of being silently dropped.
     */
    public static void demo02() {
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(producerProps())) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> producerRecord =
                        new ProducerRecord<>(TOPIC, "hello_callback" + i);
                kafkaProducer.send(producerRecord, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception exception) {
                        if (exception == null) {
                            System.out.println("success");
                            System.out.println("topic: " + metadata.topic()
                                    + " partition: " + metadata.partition());
                        } else {
                            // Delivery failed — surface it; the original code
                            // swallowed this case entirely.
                            exception.printStackTrace();
                        }
                    }
                });
            }
        }
    }

    /**
     * Synchronous send: blocks on the returned {@link java.util.concurrent.Future}
     * via {@code get()} until each record is acknowledged.
     */
    public static void demo03() {
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(producerProps())) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> producerRecord =
                        new ProducerRecord<>(TOPIC, "hello_sync" + i);
                try {
                    // Synchronous send = async send + get() on the Future.
                    kafkaProducer.send(producerRecord).get();
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe it.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
