package com.heima.stream.listener;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.*;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.messaging.handler.annotation.SendTo;

import java.time.Duration;
import java.time.Instant;
import java.util.Collections;
import java.util.Date;

@EnableBinding(CountNumListener.CountNumProcess.class)
public class CountNumListener {

    public static final String INPUT_TOPIC = "count-num-stream-topic";
    public static final String OUTPUT_TOPIC = "count-num-stream-consumer";

    /**
     * Consumes {@code "key:count"} messages from {@link #INPUT_TOPIC}, sums the
     * counts per key over a 10-second tumbling window, and emits one final
     * {@code <key, total>} record per window to {@link #OUTPUT_TOPIC}.
     *
     * <p>Malformed messages (missing {@code ':'} separator or a non-numeric
     * count) are logged and skipped instead of crashing the stream thread.
     *
     * @param stream the bound input stream of raw {@code "key:count"} messages
     * @return the windowed totals as {@code <String, String>} so plain
     *         string-deserializing consumers can read them
     */
    @SendTo(OUTPUT_TOPIC)
    @StreamListener(INPUT_TOPIC)
    public KStream<String, String> process(KStream<String, String> stream) {
        return stream
                // Parse "key:count" into <key, count>. flatMap (0..1 records) lets us
                // drop bad payloads; a throwing map would kill the processing thread.
                .flatMap((key, value) -> {
                    System.out.println(Instant.now() + " 收到原始数据：" + value);
                    String[] parts = value == null ? new String[0] : value.split(":");
                    if (parts.length < 2) {
                        System.err.println(Instant.now() + " skipping malformed message: " + value);
                        return Collections.emptyList();
                    }
                    try {
                        // e.g. key: c1, value: 200
                        return Collections.singletonList(
                                new KeyValue<>(parts[0], Integer.parseInt(parts[1].trim())));
                    } catch (NumberFormatException e) {
                        System.err.println(Instant.now() + " skipping non-numeric count: " + value);
                        return Collections.emptyList();
                    }
                })
                // Serdes for the repartitioned, grouped stream.
                .groupByKey(Grouped.with(Serdes.String(), Serdes.Integer()))
                // 10-second tumbling window; grace ZERO rejects late-arriving records.
                .windowedBy(TimeWindows.of(Duration.ofSeconds(10)).grace(Duration.ZERO))
                // Per-window sum: start at 0, add each incoming count.
                .aggregate(
                        () -> 0,
                        (k, v, aggregate) -> aggregate + v,
                        Materialized.with(Serdes.String(), Serdes.Integer()))
                // Emit only the final per-window result, not intermediate updates.
                .suppress(Suppressed.untilWindowCloses(Suppressed.BufferConfig.unbounded()))
                .toStream()
                // Downstream consumers expect <String, String>.
                .map((windowedKey, total) -> {
                    System.out.println(Instant.now() + " window closed, key: " + windowedKey.key()
                            + ", total: " + total
                            + ", window: " + windowedKey.window().startTime()
                            + " - " + windowedKey.window().endTime());
                    return new KeyValue<>(windowedKey.key(), String.valueOf(total));
                });
    }

    /** Channel bindings for the stream processor. */
    public interface CountNumProcess {
        @Input(INPUT_TOPIC)
        KStream<String, String> input();

        @Output(OUTPUT_TOPIC)
        KStream<String, String> putout();
    }

}
