package cn.enjoyedu.concurrent;

import cn.enjoyedu.config.BusiConst;
import cn.enjoyedu.config.KafkaConst;
import cn.enjoyedu.vo.DemoUser;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * @author King老师   享学课堂 https://enjoy.ke.qq.com
 * Course inquiries: 芊芊老师 QQ：2130753077 · VIP course inquiries: 依娜老师 QQ：2133576719
 * Class description: using a Kafka producer from multiple threads.
 */
public class KafkaConProducer {

    /** Number of messages to send. */
    private static final int MSG_SIZE = 1000;

    /** Thread pool that runs the send tasks; sized to the available processors. */
    private static final ExecutorService executorService =
            Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

    /** Keeps the main thread alive until every send task has completed. */
    private static final CountDownLatch countDownLatch = new CountDownLatch(MSG_SIZE);

    /**
     * Builds a demo user whose name is derived deterministically from its id.
     *
     * @param id numeric id for the user
     * @return a populated {@link DemoUser}
     */
    private static DemoUser makeUser(int id){
        DemoUser demoUser = new DemoUser(id);
        String userName = "xiangxue_"+id;
        demoUser.setName(userName);
        return demoUser;
    }

    /* Task that sends one record through the shared producer. */
    private static class ProduceWorker implements Runnable{

        private final ProducerRecord<String,String> record;
        private final KafkaProducer<String,String> producer;

        public ProduceWorker(ProducerRecord<String, String> record,
                             KafkaProducer<String, String> producer) {
            this.record = record;
            this.producer = producer;
        }

        @Override
        public void run() {
            // Tag log output with the worker thread id and the producer identity
            // so interleaved lines can be attributed.
            final String id = Thread.currentThread().getId()
                    +"-"+System.identityHashCode(producer);
            try {
                // NOTE: KafkaProducer is thread-safe, so many worker threads may
                // safely share this single instance and call send() concurrently.
                producer.send(record, (metadata, exception) -> {
                    if(null!=exception){
                        exception.printStackTrace();
                    }
                    if(null!=metadata){
                        System.out.println(id+"|"
                                +String.format("偏移量：%s,分区：%s",
                                metadata.offset(),metadata.partition()));
                    }
                });
                System.out.println(id+":数据["+record+"]已发送。");
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // BUG FIX: count down even when send() throws. Previously the
                // countdown sat on the success path only, so one failed task
                // left main() blocked on await() forever.
                countDownLatch.countDown();
            }
        }
    }

    public static void main(String[] args) {
        Properties producerConfig = KafkaConst.producerConfig(StringSerializer.class, StringSerializer.class);
        // try-with-resources closes (and flushes) the producer once all sends are done.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfig)) {

            // Fan the sends out across the thread pool.
            for (int i = 0; i < MSG_SIZE; i++) {

                DemoUser demoUser = makeUser(i);

                ProducerRecord<String, String> record = new ProducerRecord<>(
                        BusiConst.CONCURRENT_USER_INFO_TOPIC,
                        null,
                        System.currentTimeMillis(),
                        demoUser.getId() + "",
                        demoUser.toString()
                );

                executorService.submit(new ProduceWorker(record, producer));
            }

            // Block until every ProduceWorker has counted down, so the producer
            // is not closed while sends are still being issued.
            countDownLatch.await();
        } catch (InterruptedException e) {
            // Restore the interrupt status so the JVM/callers can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            executorService.shutdown();
        }
    }
}
