package sender;

import com.google.common.util.concurrent.RateLimiter;
import com.google.gson.Gson;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Load-test driver that produces JSON-serialized {@code DemoDto} records to a Kafka topic
 * at a configurable aggregate rate until the JVM is stopped (Ctrl-C / SIGTERM).
 *
 * <p>Configuration via system properties:
 * <ul>
 *   <li>{@code bootstrap-server} — Kafka bootstrap servers (default {@code 128.12.128.91:9092})</li>
 *   <li>{@code sender.thread}    — number of concurrent sender threads (default 1)</li>
 *   <li>{@code rate}             — permitted sends/second shared by all threads (default 500000)</li>
 *   <li>{@code kafka.topic}      — destination topic (default {@code yyds})</li>
 *   <li>{@code record.size}      — size hint passed to {@code DemoDto.generateDemoDto} (default 1)</li>
 * </ul>
 */
public class KafkaTest {

    // Gives each sender thread a unique, recognizable name: "kafka0", "kafka1", ...
    private static final AtomicInteger threadIndex = new AtomicInteger(0);

    // Cooperative stop flag flipped by the shutdown hook; volatile so sender threads see it.
    private static volatile boolean run = true;

    // Gson instances are thread-safe, so one shared instance is fine.
    private static final Gson gson = new Gson();

    public static void main(String[] args) throws InterruptedException {
        String bootstrapServer = System.getProperty("bootstrap-server", "128.12.128.91:9092");
        int senderThreads = Integer.parseInt(System.getProperty("sender.thread", "1"));
        double rate = Double.parseDouble(System.getProperty("rate", "500000"));
        String topic = System.getProperty("kafka.topic", "yyds");
        int recordSize = Integer.parseInt(System.getProperty("record.size", "1"));

        Properties properties = new Properties();
        properties.put("bootstrap.servers", bootstrapServer);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        final KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);

        // Size the pool to the requested sender count. The previous fixed 10/20/queue-100
        // sizing threw RejectedExecutionException once sender.thread exceeded 120 and was
        // otherwise unrelated to the configured load.
        ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(
                senderThreads, senderThreads, 100, TimeUnit.SECONDS,
                new ArrayBlockingQueue<>(Math.max(1, senderThreads)),
                r -> {
                    Thread thread = new Thread(r);
                    thread.setName("kafka" + threadIndex.getAndIncrement());
                    return thread;
                });

        // Last offset observed in a send acknowledgment. NOTE(review): with a multi-partition
        // topic this is a per-partition offset, so it is only a rough progress indicator,
        // not a record count.
        AtomicLong recordSum = new AtomicLong(0);
        RateLimiter rateLimiter = RateLimiter.create(rate);

        long startTime = System.currentTimeMillis();
        for (int i = 0; i < senderThreads; i++) {
            threadPoolExecutor.execute(() -> {
                while (run) {
                    rateLimiter.acquire(); // blocks to enforce the shared send rate
                    DemoDto demoDto = DemoDto.generateDemoDto(recordSize);
                    ProducerRecord<String, String> record =
                            new ProducerRecord<>(topic, demoDto.getName(), gson.toJson(demoDto));
                    kafkaProducer.send(record, (metadata, exception) -> {
                        if (metadata != null) {
                            recordSum.set(metadata.offset());
                        } else {
                            exception.printStackTrace();
                        }
                    });
                }
            });
        }

        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            run = false;
            threadPoolExecutor.shutdown();
            try {
                // Let sender threads observe the stop flag and drain before we report.
                threadPoolExecutor.awaitTermination(10, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            // Flush buffered records and release client resources. The original never closed
            // the producer, so records still sitting in its buffer were silently dropped.
            kafkaProducer.close();
            System.out.println("cost time:" + (System.currentTimeMillis() - startTime) + " offset:" + recordSum);
        }));

        // Park main forever; shutdown is driven externally via the hook above.
        new CountDownLatch(1).await();
    }
}
