package demo.kafka.streams.utils;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.Random;

/**
 * Helpers that spawn simple background Kafka clients (a word-count result
 * consumer and a demo message producer) for local experimentation.
 */
public class KafkaUtil {

    private static final Logger logger = LoggerFactory.getLogger(KafkaUtil.class);

    /** Utility class; not meant to be instantiated. */
    private KafkaUtil() {
    }

    /**
     * Starts a background thread that consumes word-count results
     * ({@code String} key, {@code Long} count) from the given topic and logs
     * each record. The thread polls until it is interrupted.
     *
     * @param topic  Kafka topic to subscribe to
     * @param daemon whether the consumer thread is a daemon thread
     */
    public static void startConsumer(String topic, boolean daemon) {
        Thread t = new Thread(() -> {
            Properties consumerProps = new Properties();
            consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, Constants.KAFKA_BOOTSTRAP_SERVERS);
            consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "word-count-consumer");
            // Read from the earliest offset when the group has no committed offset yet.
            consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
            try (KafkaConsumer<String, Long> consumer = new KafkaConsumer<>(consumerProps)) {
                consumer.subscribe(Collections.singletonList(topic));
                // Exit cleanly on interruption instead of looping forever;
                // closing the consumer is handled by try-with-resources.
                while (!Thread.currentThread().isInterrupted()) {
                    ConsumerRecords<String, Long> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, Long> record : records) {
                        // Log once via SLF4J; records are not duplicated on System.err.
                        logger.info("Key={}, Value={}", record.key(), record.value());
                    }
                    ThreadUtil.safeSleep(500);
                }
            }
        }, "消费者");
        t.setDaemon(daemon);
        t.start();
    }

    /**
     * Starts a background thread that produces demo "hello ..." messages to the
     * given topic, one random line every 5 seconds, until interrupted.
     *
     * @param topic  Kafka topic to send to
     * @param daemon whether the producer thread is a daemon thread
     */
    public static void startProducer(String topic, boolean daemon) {
        Thread t = new Thread(() -> {
            Properties producerCfg = new Properties();
            // Kafka broker host(s) and port(s).
            producerCfg.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, Constants.KAFKA_BOOTSTRAP_SERVERS);
            // Client id reported to the broker for logging/metrics.
            producerCfg.put(CommonClientConfigs.CLIENT_ID_CONFIG, "ProducerDemo");
            // Keys and values travel as byte arrays; use Class constants (as the
            // consumer above does) rather than typo-prone string class names.
            producerCfg.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            producerCfg.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            // "all" waits for the full ISR to acknowledge — highest durability.
            producerCfg.put(ProducerConfig.ACKS_CONFIG, "all");

            String msg = "hello world\n" +
                    "hello java\n" +
                    "hello kafka\n" +
                    "hello stream\n" +
                    "hello spring\n" +
                    "hello spring cloud\n" +
                    "hello spring cloud stream\n";
            String[] lines = msg.split("\n");
            Random random = new Random();
            try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(producerCfg)) {
                // Exit cleanly on interruption; closing the producer flushes
                // any buffered records via try-with-resources.
                while (!Thread.currentThread().isInterrupted()) {
                    int i = random.nextInt(lines.length);
                    String msgKey = "key" + i;
                    String msgValue = lines[i];
                    ProducerRecord<String, String> producerRecord = new ProducerRecord<>(topic, msgKey, msgValue);
                    // Fire-and-forget send; delivery failures surface only in client logs.
                    kafkaProducer.send(producerRecord);
                    logger.info("send msg, key={}, value={}", msgKey, msgValue);
                    ThreadUtil.safeSleep(5000);
                }
            }
        }, "生产者");
        t.setDaemon(daemon);
        t.start();
    }
}
