package com.atguigu.kafka.pro.question;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * @author ：剧情再美终是戏
 * @description：Data producer (writes test messages to a single Kafka partition)
 * @mail : 13286520398@163.com
 * @date ：Created in 2020/1/24 20:39
 * @modified By：
 * @version: 1.0
 */
public class Produce {

    /**
     * Produces {@code total} messages to partition 0 of the given topic.
     *
     * <p>Every record is sent with a {@code null} key and an explicit partition of 0,
     * so all messages land in a single partition (useful for ordering experiments).
     * The record value is {@code "<index>-<currentTimeMillis>"}.
     *
     * @param topic the Kafka topic to write to
     * @param total number of messages to produce
     * @throws InterruptedException if the sending thread is interrupted while throttling
     */
    public static void produce(String topic, int total) throws InterruptedException {
        Properties props = new Properties();
        // Use ProducerConfig constants consistently instead of raw string keys.
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop101:9092");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1000);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        // try-with-resources guarantees close() (which flushes buffered records)
        // runs even if send() or sleep() throws; the original leaked the producer
        // on any exception. Also fixes the raw-type KafkaProducer construction.
        try (KafkaProducer<Integer, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < total; i++) {
                // Explicit partition 0, null key: single-partition production.
                ProducerRecord<Integer, String> record =
                        new ProducerRecord<>(topic, 0, null, i + "-" + System.currentTimeMillis());
                System.out.print(".");
                producer.send(record);
                Thread.sleep(1); // throttle to roughly one message per millisecond
            }
        }
    }


    public static void main(String[] args) throws InterruptedException {
        // Produce the configured number of messages to the configured topic.
        produce(Constan.topic, Constan.produceMessageTotal);
    }
}
