package com.heima.kafkastream;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.*;

import java.time.Duration;
import java.time.Instant;
import java.util.Date;
import java.util.Properties;

/**
 * 核心的处理节点
 *
 * @author Fuqiang
 * @since :2023-04-03
 */
/**
 * Kafka Streams topology that consumes raw "articleId:score" events, sums the
 * scores per article over a 10-second tumbling window, and emits one final
 * "(articleId, totalScore)" record per window to the output topic.
 *
 * <p>Input record value format: {@code <articleId>:<score>} (e.g. {@code c1:3}).
 * Malformed records are filtered out before parsing instead of being counted
 * under a placeholder key.
 */
public class CountNumApplication {

    /** Topic carrying raw per-event score messages. */
    private static final String INPUT_TOPIC = "hot.article.score.stream.topic";
    /** Topic receiving the per-window aggregated totals. */
    private static final String OUT_TOPIC = "count-num-stream-consumer";
    /** Kafka broker bootstrap address. */
    private static final String KAFKA_IP = "192.168.137.136:9092";

    public static void main(String[] args) {

        try {
            StreamsBuilder streamsBuilder = new StreamsBuilder();
            // Source: raw "key:value" strings, e.g. "c1:1", "c2:2".
            KStream<String, String> stream = streamsBuilder.stream(INPUT_TOPIC);
            stream
                    // Drop malformed records up front ("<key>:<digits>" expected).
                    // The original code hard-coded KeyValue("c1", 1) to dodge parse
                    // failures; filtering keeps bad data out without corrupting keys.
                    .filter((key, value) -> value != null && value.matches("[^:]+:\\d+"))
                    // Split the payload into (articleId, score) so the next step can
                    // group and aggregate per article.
                    .map((key, value) -> {
                        System.out.println(Instant.now() + "-收到原始数据:" + value);
                        String[] parts = value.split(":");
                        return new KeyValue<>(parts[0], Integer.valueOf(parts[1]));
                    })
                    // Group by article id with explicit serdes (defaults are String/String).
                    .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
                    // 10-second tumbling window, no grace period for late records.
                    .windowedBy(TimeWindows.of(Duration.ofSeconds(10)).grace(Duration.ZERO))
                    // Sum the scores within each window, starting from 0.
                    .aggregate(
                            () -> {
                                System.out.println(Instant.now() + "--时间窗口初始化");
                                return 0;
                            },
                            (key, value, aggregate) -> {
                                System.out.println(Instant.now() + "时间窗口内的聚合, 当前key = " + key
                                        + "当前value = " + value + "当前aggregate = " + aggregate);
                                return value + aggregate;
                            },
                            Materialized.with(Serdes.String(), Serdes.Integer()))
                    // Emit only the final result per window, not intermediate updates.
                    .suppress(Suppressed.untilWindowCloses(Suppressed.BufferConfig.unbounded()))
                    .toStream()
                    // Unwrap the windowed key and stringify the total so the sink topic
                    // carries plain String/String records.
                    .map((windowedKey, total) -> {
                        String key = windowedKey.key();
                        System.out.println("时间窗口聚合结束, key = " + key + "value = " + total);
                        return new KeyValue<>(key, String.valueOf(total));
                    })
                    .to(OUT_TOPIC);

            Properties properties = new Properties();
            properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "count-num-app");
            properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_IP);
            properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
            properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

            KafkaStreams kafkaStreams = new KafkaStreams(streamsBuilder.build(), properties);
            // Close the Streams runtime cleanly (flush state, commit offsets) on JVM shutdown.
            Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));
            kafkaStreams.start();
            System.out.println("kafka stream start");
        } catch (Exception e) {
            // Boundary catch for a standalone main: report and exit.
            // NOTE(review): prefer an SLF4J logger if one is available to this module.
            e.printStackTrace();
        }

    }
}
