package com.gyg.kafkademo.product;

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;


/**
 * Demo producer that simulates application-service log events and publishes
 * them to Kafka as String key/value records.
 *
 * <p>Keys have the form {@code <service>-<8-char-uuid>}; values are randomly
 * generated log lines whose template depends on the chosen log level.
 * Runs until the thread is interrupted; the producer is flushed and closed
 * on exit via try-with-resources.
 */
public class MyProduct {
    /** Target topic for the generated log messages. */
    private static final String TOPIC = "log-demo1";
    /** Kafka bootstrap server address (demo environment). */
    private static final String KAFKA_SERVICE = "192.168.6.38:9092";

    // Simulated service names, used as message-key prefixes.
    private static final String[] SERVICE = {"user-service", "order-service", "product-service", "payment-service"};
    // Log levels the generator picks from.
    private static final String[] LEVELS = {"INFO", "WARN", "ERROR"};

    /**
     * Builds the producer configuration: String serialization, delivery
     * reliability, connection/timeout tuning, and batching for throughput.
     *
     * @return the populated {@link Properties} for {@link KafkaProducer}
     */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_SERVICE);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // Wait for all in-sync replicas so batched demo messages are not silently lost.
        props.put(ProducerConfig.ACKS_CONFIG, "all");

        // Connection and timeout tuning.
        props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 30000);
        props.put(ProducerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, 10000);
        props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, 1000);
        props.put(ProducerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, 10000);

        // Optional: small linger + larger batches + compression to improve throughput.
        props.put(ProducerConfig.LINGER_MS_CONFIG, 20);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 32 * 1024); // 32KB
        props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");

        return props;
    }

    /**
     * Generates one random log message; the randomly picked level selects
     * the message template (success / warning-percentage / connection error).
     *
     * @return the generated log line
     */
    private static String getLogs() {
        ThreadLocalRandom random = ThreadLocalRandom.current();
        String level = LEVELS[random.nextInt(LEVELS.length)];

        switch (level) {
            case "INFO":
                return "用户操作成功-" + UUID.randomUUID();
            case "WARN":
                return "警告" + (70 + random.nextInt(30)) + "%";
            case "ERROR":
                return "连接失败-" + (random.nextBoolean() ? "timeout" : "authentication");
            default:
                // Unreachable while LEVELS only holds the three cases above.
                return "未找到具体错误";
        }
    }

    public static void main(String[] args) {
        // try-with-resources guarantees the producer flushes buffered records
        // and releases network resources when the loop exits.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(getProperties())) {
            ThreadLocalRandom random = ThreadLocalRandom.current();
            int messageCount = 0;

            // Produce until the thread is interrupted.
            while (!Thread.currentThread().isInterrupted()) {
                String service = SERVICE[random.nextInt(SERVICE.length)];
                String key = service + "-" + UUID.randomUUID().toString().substring(0, 8);
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, key, getLogs());

                // Asynchronous send; the callback reports the per-record outcome.
                producer.send(record, (metadata, e) -> {
                    if (e != null) {
                        System.err.println("Error sending message: " + e.getMessage());
                    } else {
                        System.out.println("发送到topic: " + metadata.topic() +
                                ", partition: " + metadata.partition() +
                                ", offset: " + metadata.offset() +
                                ", key: " + key);
                    }
                });

                messageCount++;
                // Progress notice every 100 messages.
                if (messageCount % 100 == 0) {
                    System.out.println("Produced " + messageCount + " messages");
                }

                try {
                    // Sleep 200-1000 ms: roughly 1-5 log messages per second.
                    Thread.sleep(200 + random.nextInt(800));
                } catch (InterruptedException ie) {
                    // Restore the interrupt flag so the loop condition exits cleanly.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }
}
