package com.lagou.kafka.demo.producer;

import com.lagou.kafka.demo.entity.User;
import com.lagou.kafka.demo.serializer.UserSerializer;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

/**
 * Demo producer that sends a single {@link User} record to Kafka.
 *
 * <p>Uses {@link StringSerializer} for keys and the project's custom
 * {@link UserSerializer} for values. The record is sent to partition 0 of
 * {@code topic_1} with the username as the key, and the delivery result is
 * reported from the asynchronous send callback.
 */
public class MyProducer {
    public static void main(String[] args) {
        // Producer configuration: broker address plus key/value serializers.
        // Declared as the Map interface rather than the concrete HashMap.
        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop001:9092");
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        // Custom serializer class for the User value type.
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, UserSerializer.class);

        User user = new User();
        user.setUserId(1001);
        user.setUsername("张三");

        // Target: topic_1, partition 0, key = username, value = the User object.
        ProducerRecord<String, User> record = new ProducerRecord<>(
                "topic_1",
                0,
                user.getUsername(),
                user
        );

        // try-with-resources guarantees the producer is closed — flushing any
        // buffered records — even if send(...) throws. The previous version
        // could leak the producer on an exception before the explicit close().
        try (KafkaProducer<String, User> producer = new KafkaProducer<>(configs)) {
            producer.send(record, (metadata, exception) -> {
                if (exception == null) {
                    System.out.println("消息发送成功: " +
                            metadata.topic() + "\t" +
                            metadata.partition() + "\t" +
                            metadata.offset() + "\t");
                } else {
                    System.out.println("消息发送异常");
                    // Surface the failure cause instead of swallowing it.
                    exception.printStackTrace();
                }
            });
        }
    }
}
