package com.ssm.producer;

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;
import java.util.concurrent.Future;
/**
 * @Author: BinZhang
 * @Date: 2018/11/30 2:06
 * @Description:kafka生产者
 *
 */
/**
 * Demo Kafka producer: sends 1000 numbered string messages to a topic,
 * one every 500 ms, then flushes and closes the producer.
 */
public class KafkaProducerDemo {
    private final KafkaProducer<Integer, String> kafkaProducer;
    private final String topic;

    /**
     * Builds a producer configured for the demo broker.
     *
     * @param topic destination Kafka topic
     */
    public KafkaProducerDemo(String topic) {
        Properties properties = new Properties();
        // bootstrap.servers takes plain host:port pairs — no URL scheme.
        // (The original "http://47.106.174.143:9092" is rejected by the client
        // with a ConfigException.)
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "47.106.174.143:9092");
        // Logical id of this client, shown in broker-side metrics/logs.
        properties.put(ProducerConfig.CLIENT_ID_CONFIG, "KafkaProducerDemo");
        // acks=-1 (same as "all"): wait for the full ISR to acknowledge.
        properties.put(ProducerConfig.ACKS_CONFIG, "-1");
        // Key serializer (keys are Integer; this demo actually sends keyless records).
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.IntegerSerializer");
        // Value serializer for the String payload.
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // Use the diamond operator — the original raw-type construction
        // discarded the field's generic parameters.
        this.kafkaProducer = new KafkaProducer<>(properties);
        this.topic = topic;
    }

    /**
     * Sends 1000 messages, pausing 500 ms between sends. Stops early if the
     * thread is interrupted. Flushes and closes the producer before returning
     * so buffered records are not lost on JVM exit.
     */
    public void startKafkaSendMessage() {
        try {
            for (int i = 0; i < 1000; i++) {
                String message = "kafaka-msg:  " + i;
                System.out.println(message);
                // send() is async; the Future could be used to confirm delivery.
                Future<RecordMetadata> send =
                        kafkaProducer.send(new ProducerRecord<Integer, String>(topic, message));
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop — don't swallow it.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        } finally {
            // close() flushes pending records and releases sockets/buffers.
            kafkaProducer.close();
        }
    }

    public static void main(String[] args) {
        new KafkaProducerDemo("test").startKafkaSendMessage();
    }

}
