package top.tagao.kafka.producer.myPartition;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Properties;
import java.util.Random;

/**
 * @author Viktor_Liu
 * @Date 2022/8/24 23:21
 */
public class CustomProducer02MyPartition {

    // Thread-safe and expensive to build; cache once instead of re-creating it
    // for every one of the ~1.8M messages (was inside the inner loop).
    // Note the trailing space: the time-of-day string is appended directly.
    private static final DateTimeFormatter DATE_FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd ");

    // Single shared RNG; the helper previously allocated a new Random per call.
    private static final Random RANDOM = new Random();

    /**
     * Generates 20 "days" of synthetic event-log JSON (90,000 messages per day),
     * prints each message, and — when the commented-out send is enabled —
     * publishes them to a Kafka topic.
     */
    public static void main(String[] args) {

        // 1. Producer configuration
        Properties properties = new Properties();
        // Broker address
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.233.94:9092");
        // Key/value serializers
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Custom partitioning rule (currently disabled)
        //properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, MyPartitioner.class.getName());

        // batch.size: batch size, default 16K
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        // linger.ms: wait time before sending a partially-full batch, default 0
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        // buffer.memory: RecordAccumulator buffer size, default 32M
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        // compression.type: default none; valid values are gzip, snappy, lz4, zstd
        properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000);

        // JSON message template filled in per message below.
        String template = "{\n" +
                "\t\"app_id\": %d,\n" +
                "\t\"user_id\": %d,\n" +
                "\t\"event_id\": %d,\n" +
                "\t\"pv_id\": %d,\n" +
                "\t\"trigger_time\": \"%s\",\n" +
                "\t\"attr_value01\": \"%s\",\n" +
                "\t\"attr_value02\": \"%s\",\n" +
                "\t\"attr_value03\": \"%s\",\n" +
                "\t\"attr_value04\": \"%s\",\n" +
                "\t\"attr_value05\": \"%s\"\n" +
                "}";

        // Base date: midnight, 2023-07-08; actual days are offset from it.
        LocalDateTime currentTime = LocalDateTime.of(2023, 7, 8, 0, 0);

        int total = 0;

        // 2. Create the Kafka producer. try-with-resources guarantees close()
        // (which also flushes buffered records) even if the loop throws —
        // previously close() was only reached on the happy path.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {

            for (int i = 60; i < 80; i++) { // 20 days, offsets 60..79 from the base date

                LocalDateTime currentDay = currentTime.plusDays(i);

                for (int j = 0; j < 90000; j++) { // 90,000 messages per simulated day
                    // Date part only; the time of day is appended from j below.
                    String triggerTime = currentDay.format(DATE_FORMATTER);
                    int appId = (i % 10 == 0) ? 110 : 100;
                    int eventId = (appId == 110) ? 211 + (i / 10) % 4 : 201 + (i / 10) % 3;
                    int userId = 301 + RANDOM.nextInt(10);
                    int pvId = userId;

                    // Derive HH:mm:ss from the message index within the day.
                    triggerTime += String.format("%02d:%02d:%02d", (j / 3600) % 24, (j / 60) % 60, j % 60);

                    String attrValue01 = generateRandomAttributeValue();
                    String attrValue02 = generateRandomAttributeValue();
                    String attrValue03 = generateRandomAttributeValue();
                    String attrValue04 = generateRandomAttributeValue();
                    String attrValue05 = generateRandomAttributeValue();

                    String message = String.format(template, appId, userId, eventId, pvId, triggerTime,
                            attrValue01, attrValue02, attrValue03, attrValue04, attrValue05);
                    System.out.println(message);

                    // Actual publish is disabled; uncomment to send to the topic.
//                    kafkaProducer.send(new ProducerRecord<>("miu_event_log_test", message), new Callback() {
//                        @Override
//                        public void onCompletion(RecordMetadata recordMetadata, Exception e) {
//                            if (e != null)
//                                System.err.println("Error sending message: " + e.getMessage());
//                        }
//                    });
                    total++;
                }
                System.out.println(total);
                try {
                    // Brief pause between days so the broker is not flooded.
                    Thread.sleep(12);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop, instead of swallowing
                    // the interruption with printStackTrace() and continuing.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
    }

    /**
     * Builds a random attribute value: two random uppercase ASCII letters
     * followed by the fixed suffix "测试测试".
     */
    private static String generateRandomAttributeValue() {
        char c1 = (char) ('A' + RANDOM.nextInt(26));
        char c2 = (char) ('A' + RANDOM.nextInt(26));
        return String.valueOf(c1) + c2 + "测试测试";
    }

}