package com.myhexin.demo.kafkastream.listener;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;

import java.time.Duration;
import java.util.Arrays;

/**
 * @author zhangzhonglv@myhexin.com
 * @date 2025/8/31
 */
@Configuration
@Slf4j
public class KafkaStreamListener {

    private static final String INPUT_TOPIC = "streams-plaintext-input";
    private static final String OUTPUT_TOPIC = "streams-pipe-output";

    /**
     * Logs every windowed word-count result that arrives on the output topic.
     *
     * @param record consumed record; the key is the windowed word (the
     *               {@code Windowed.toString()} form produced by {@link #kStream}),
     *               the value is the count rendered as a string
     */
    @KafkaListener(topics = OUTPUT_TOPIC)
    public void listen(ConsumerRecord<String, String> record) {
        log.info("message received, key:{}, value:{}", record.key(), record.value());
    }

    /**
     * Builds a word-count topology: reads plain-text lines from
     * {@value #INPUT_TOPIC}, splits each line into words, counts occurrences
     * per 5-second tumbling window, and writes {@code windowedWord -> count}
     * string pairs to {@value #OUTPUT_TOPIC}.
     *
     * @param streamsBuilder builder supplied by Spring's Kafka Streams auto-configuration
     * @return the source stream (exposed as a bean so the topology is registered)
     */
    @Bean
    public KStream<String, String> kStream(StreamsBuilder streamsBuilder) {
        KStream<String, String> stream = streamsBuilder.stream(INPUT_TOPIC);
        stream
                // Split each message value into individual words on single spaces.
                .flatMapValues(value -> Arrays.asList(value.split(" ")))
                // Drop the empty tokens produced by consecutive spaces.
                .filter((key, value) -> !value.isEmpty())
                // Re-key by the word itself so identical words are counted together.
                .groupBy((key, value) -> value)
                // 5-second tumbling window.
                // NOTE(review): TimeWindows.of is deprecated since Kafka 2.8 — prefer
                // TimeWindows.ofSizeAndGrace(Duration.ofSeconds(5), Duration.ofHours(24))
                // (same default grace period) once the client library supports it.
                .windowedBy(TimeWindows.of(Duration.ofSeconds(5)))
                // Count occurrences of each word within the window.
                .count()
                // Back to a KStream; the key is a Windowed<String> carrying the word
                // plus the window start/end times, the value is the count.
                .toStream()
                .map((key, value) -> new KeyValue<>(key.toString(), value.toString()))
                // Publish the results.
                .to(OUTPUT_TOPIC);
        return stream;
    }

}
