package plus.chendd.kafka.demo.thread;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import plus.chendd.kafka.demo.entity.User;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Demo: produces {@value #MSG_SIZE} user records to the {@code order01} topic
 * concurrently from a fixed thread pool, then waits for every send to be
 * handed off before flushing and closing the producer.
 *
 * <p>A single {@link KafkaProducer} is shared by all workers — KafkaProducer
 * is documented as thread-safe, so this is the intended usage pattern.
 */
public class KafkaConProducer {
  /** Total number of messages to produce in one run. */
  private static final int MSG_SIZE = 1000;
  /** Worker pool that performs the individual sends. */
  private static final ExecutorService executorService = Executors.newFixedThreadPool(4);
  /** Counted down once per record so main() can wait for all sends to be dispatched. */
  private static final CountDownLatch countDownLatch = new CountDownLatch(MSG_SIZE);


  /** Builds a demo {@code User} whose name is derived from its id. */
  private static User makeUser(String id) {
    User user = new User(id);
    user.setName("name" + id);
    return user;
  }


  /** One unit of work: sends a single record and logs the result asynchronously. */
  private static class ProduceWorker implements Runnable {

    private final ProducerRecord<String, String> record;
    private final KafkaProducer<String, String> producer;

    public ProduceWorker(ProducerRecord<String, String> record, KafkaProducer<String, String> producer) {
      this.record = record;
      this.producer = producer;
    }

    @Override
    public void run() {
      // Correlates log lines from the same worker thread / producer instance.
      final String id = Thread.currentThread().getId() + "-" + System.identityHashCode(producer);
      try {
        producer.send(record, new Callback() {
          @Override
          public void onCompletion(RecordMetadata metadata, Exception exception) {
            if (null != exception) {
              exception.printStackTrace();
            }
            if (null != metadata) {
              System.out.println(id + "|" + String.format("偏移量：%s,分区：%s", metadata.offset(), metadata.partition()));
            }
          }
        });
        System.out.println(id + ":数据[" + record + "]已发送。");
      } catch (Exception e) {
        e.printStackTrace();
      } finally {
        // Count down even when send() throws, so main() can never hang on await().
        countDownLatch.countDown();
      }
    }
  }


  public static void main(String[] args) {
    Properties properties = new Properties();

    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.56.12:9092");
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

    // try-with-resources guarantees the producer is flushed and closed on every
    // path; the original closed it only on failure and never on success, which
    // could drop buffered records at JVM exit.
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
      for (int i = 0; i < MSG_SIZE; i++) {
        User user = makeUser(Integer.toString(i));
        ProducerRecord<String, String> record = new ProducerRecord<>("order01", null, System.currentTimeMillis(), user.getId() + "", user.toString());
        executorService.submit(new ProduceWorker(record, producer));
      }
      // Wait until every worker has attempted its send before closing the producer.
      countDownLatch.await();
    } catch (InterruptedException e) {
      // Re-assert the interrupt flag rather than swallowing it.
      Thread.currentThread().interrupt();
    } finally {
      // Release the non-daemon pool threads so the JVM can exit cleanly.
      executorService.shutdown();
    }
  }
}
