package 生产者;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutionException;


public class Kafka01_Productor {

    /**
     * Demo producer: synchronously sends one keyed String message to a Kafka
     * topic and prints the returned topic/partition/offset metadata.
     */
    public static void main(String[] args) {

        // Kafka producer configuration.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "114.251.235.19:9092"); // Kafka broker address
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // Target topic and message payload.
        String topic = "topic_test";
        String key = "messageKey";
        String value = "Hello, Kafka!";

        // Record to send: (topic, key, value).
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, key, value);

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) even when the send fails.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Synchronous send: get() blocks until the broker acknowledges.
            RecordMetadata metadata = producer.send(record).get();

            // Report where the broker stored the message.
            System.out.println("Message sent to topic:" + metadata.topic() +
                    ", partition:" + metadata.partition() +
                    ", offset:" + metadata.offset());
        } catch (ExecutionException e) {
            // The send itself failed; the cause carries the broker-side error.
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so enclosing code can observe it,
            // instead of silently swallowing the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
}
