package com.doit.day01;

import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Demo: publishes 10 000 string records to the {@code doit39} topic,
 * throttled to roughly 50 records per second.
 *
 * <p>Requires a reachable Kafka cluster at the bootstrap addresses below.
 */
public class ProducerDemo {
    public static void main(String[] args) throws InterruptedException {
        Properties props = new Properties();
        // Kafka cluster bootstrap addresses.
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux01:9092,linux02:9092,linux03:9092");
        // Key and value serializers: both key and value are plain strings.
        props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // acks=-1 (same as "all"): the leader waits for the full in-sync
        // replica set to acknowledge each record — strongest durability.
        props.setProperty(ProducerConfig.ACKS_CONFIG, "-1");
        // Optional: plug in a custom partitioner here, e.g.
        // props.setProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG, MyPartitioner.class.getName());

        // try-with-resources guarantees the producer is closed — and its
        // buffered records flushed — even if send() or sleep() throws
        // mid-loop. The original only closed on the happy path, leaking the
        // producer's I/O thread and sockets on any exception.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10000; i++) {
                // Value = 10 random letters + loop index. No key is set, so the
                // default partitioner spreads records across partitions.
                ProducerRecord<String, String> record =
                        new ProducerRecord<>("doit39", RandomStringUtils.randomAlphabetic(10) + i);
                producer.send(record);
                Thread.sleep(20); // throttle to ~50 records/sec
            }
            // No explicit flush() needed: close() (via try-with-resources)
            // flushes all buffered records before shutting down.
        }
    }
}
