package com.example.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Demo Kafka producer that sends messages over a SASL/PLAIN authenticated connection.
 *
 * @author changlu
 */
public class CustomProducer {

    /**
     * Sends five demo messages ("changlu0" … "changlu4") to the {@code first}
     * topic, authenticating against the brokers with SASL/PLAIN.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // 1. Producer configuration.
        Properties properties = new Properties();

        // 2. Broker list and key/value serializers.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "192.168.200.130:19093,192.168.200.130:19092,192.168.200.130:19094");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // SASL/PLAIN authentication.
        // NOTE(review): credentials are hard-coded here; load them from external
        // configuration (env var / secrets store) before any real deployment.
        properties.put("security.protocol", "SASL_PLAINTEXT");
        properties.put("sasl.mechanism", "PLAIN");
        properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required " +
                "username=\"admin\" password=\"123456\";");

        // 3-5. Create the producer, send, and close. try-with-resources
        // guarantees close() runs (flushing any buffered records and releasing
        // network resources) even if send() throws — the original unconditional
        // close() call would be skipped on an exception, leaking the producer.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 5; i++) {
                kafkaProducer.send(new ProducerRecord<>("first", "changlu" + i));
            }
        }
    }
}