package com.hntech.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * Demo producer that sends 20 numbered string messages to a Kafka topic,
 * one every two seconds, printing the metadata of each acknowledged send.
 *
 * @author changqi.wu
 */
public class Producer extends Thread {

    private final KafkaProducer<Integer, String> producer;
    private final String topic;

    /**
     * Creates a producer targeting the given topic.
     *
     * @param topic destination Kafka topic name
     */
    public Producer(String topic) {
        this.topic = topic;
        Properties properties = new Properties();
        // NOTE(review): broker address is hard-coded; externalize for real deployments.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.1.5:9092");
        properties.put(ProducerConfig.CLIENT_ID_CONFIG, "kafka-producer");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        producer = new KafkaProducer<>(properties);
    }

    /**
     * Sends 20 messages synchronously (blocking on each ack), pausing two
     * seconds between sends. The producer is closed when the loop ends so
     * buffered records are flushed and resources released.
     */
    @Override
    public void run() {
        try {
            int num = 0;
            while (num < 20) {
                // Bug fix: build the message inside the loop so each send
                // carries the current counter value (the original computed it
                // once with num == 0 and sent 20 identical payloads).
                String msg = "kafka hntech msg " + num;
                try {
                    RecordMetadata meta = producer.send(new ProducerRecord<>(topic, msg)).get();
                    System.out.println(meta.topic() + " -> " + meta.offset() + " -> " + meta.partition());

                    TimeUnit.SECONDS.sleep(2);
                    ++num;
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop — swallowing the
                    // interrupt would make this thread uncancellable.
                    Thread.currentThread().interrupt();
                    break;
                } catch (ExecutionException e) {
                    // Send failed on the broker side; report and keep going
                    // with the next message (demo-level handling).
                    e.printStackTrace();
                }
            }
        } finally {
            // Flush buffered records and release network/IO resources.
            producer.close();
        }
    }


    public static void main(String[] args) {
        new Producer("topic-1").start();
    }

}
