package com.example.java.kafka.kafkaclients;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;

import java.time.Duration;
import java.util.Arrays;
import java.util.Locale;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;

/**
 * Kafka Streams word-count example.
 *
 * <p>Reads text lines from the topic {@code streams-plaintext-input}, splits each line into
 * lowercase words, maintains a running count per word in a state store, and publishes every
 * count update to {@code streams-word-count-output}. Runs until the JVM receives a shutdown
 * signal (Ctrl+C / SIGTERM), then closes the streams instance gracefully.
 */
public class WordCountExample {

    /** Default broker address; can be overridden by the first command-line argument. */
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "43.139.50.31:9092";

    public static void main(String[] args) {
        // Allow the broker address to be supplied on the command line; fall back to the
        // original hard-coded default so existing invocations keep working.
        final String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;

        // Kafka Streams configuration: broker, application id (also the consumer group /
        // state-store prefix), and default String serdes for keys and values.
        final Properties props = new Properties();
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "application-id");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        final Topology topology = buildTopology();
        final KafkaStreams streams = new KafkaStreams(topology, props);

        // Latch released by the shutdown hook so main() blocks until the JVM is asked to exit.
        final CountDownLatch latch = new CountDownLatch(1);
        Runtime.getRuntime().addShutdownHook(new Thread("streams-word-count-shutdown-hook") {
            @Override
            public void run() {
                // Give in-flight work up to 10 seconds to drain before shutting down.
                streams.close(Duration.ofSeconds(10));
                latch.countDown();
            }
        });

        try {
            streams.start();
            System.out.println("拓扑结构：\n" + topology.describe());
            latch.await(); // block until the shutdown hook fires
        } catch (InterruptedException e) {
            // Restore the interrupt flag before exiting with a failure status.
            Thread.currentThread().interrupt();
            System.exit(1);
        }
        System.exit(0);
    }

    /**
     * Builds the word-count topology.
     *
     * <p>Pipeline: source topic → debug peek → split into lowercase words → group by word →
     * count into the {@code counts-store} state store → print and publish each update.
     *
     * @return the assembled {@link Topology}
     */
    private static Topology buildTopology() {
        final StreamsBuilder builder = new StreamsBuilder();

        // Each source record is a <key, line> pair, e.g. <null, "Hello World Hello">.
        KStream<String, String> sourceStream = builder.stream("streams-plaintext-input");

        KTable<String, Long> counts = sourceStream
                // Debug: show each raw input record.
                .peek((key, value) -> System.out.println("原始输入: key=" + key + ", value=" + value))
                // Split each line into words on non-word characters. Locale.ROOT makes the
                // lower-casing locale-independent (Locale.getDefault() would e.g. mis-handle
                // 'I' under a Turkish default locale, changing which words get merged).
                .flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.ROOT).split("\\W+")))
                // Re-key each record by the word itself so count() aggregates per word.
                // Note: groupBy() triggers a repartition; peek() is not available on the
                // grouped stream, but updates can be observed on the resulting KTable below.
                .groupBy((key, word) -> word)
                // Running count per word, materialized in a state store named "counts-store".
                .count(Materialized.as("counts-store"));

        // Debug: print every count update as it flows downstream.
        counts.toStream()
                .foreach((word, count) -> System.out.println("单词计数更新: " + word + " => " + count));

        // Publish updates with explicit serdes — the value is a Long, not the default String.
        counts.toStream()
                .to("streams-word-count-output", Produced.with(Serdes.String(), Serdes.Long()));

        return builder.build();
    }
}