package com.hhf.rrd.usage.window;

import com.hhf.rrd.model.Access;
import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.eventtime.*;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.TypeInformationSerializationSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.formats.json.JsonRowDataDeserializationSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPunctuatedWatermarks;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.runtime.operators.util.AssignerWithPunctuatedWatermarksAdapter;
import org.apache.flink.streaming.util.serialization.JSONKeyValueDeserializationSchema;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import javax.annotation.Nullable;
import java.time.Duration;
import java.util.Properties;

/**
 * 窗口算子
 *
 * @author huanghaifeng15
 * @date 2022/2/10 16:54
 **/
/**
 * Window operator demo.
 *
 * <p>Reads lines of the form {@code "<epochMillis>,<intValue>"} from a local socket,
 * assigns event-time timestamps and watermarks, sums values per key in 5-second
 * tumbling event-time windows, and routes late records to a side output. A Kafka
 * source with a punctuated watermark assigner is also wired up for reference
 * (it has no sink attached in this demo).
 *
 * @author huanghaifeng15
 * @date 2022/2/10 16:54
 **/
public class WindowApp {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Socket source; each line is expected to be "<epochMillis>,<intValue>".
        DataStreamSource<String> source = env.socketTextStream("localhost", 9999);

        // ---- Kafka source example ------------------------------------------------
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:9092");
        properties.setProperty("group.id", "test");

        // The original wrapped the schema in an anonymous subclass whose deserialize()
        // only delegated to super — removed; behavior is identical.
        FlinkKafkaConsumer<ObjectNode> myConsumer =
                new FlinkKafkaConsumer<>("topic", new JSONKeyValueDeserializationSchema(true), properties);

        // Per-record (punctuated) watermark generation; PunctuatedAssigner is a
        // project-local WatermarkGenerator implementation.
        myConsumer.assignTimestampsAndWatermarks(new WatermarkStrategy<ObjectNode>() {
            @Override
            public WatermarkGenerator<ObjectNode> createWatermarkGenerator(WatermarkGeneratorSupplier.Context context) {
                return new PunctuatedAssigner();
            }
        });
        // NOTE(review): this stream has no sink attached — it exists for reference only.
        DataStream<ObjectNode> kafkaDataStream = env.addSource(myConsumer);

        // ---- Event-time pipeline -------------------------------------------------
        // FIX: the original first called source.assignTimestampsAndWatermarks(...) with a
        // 20s bounded-out-of-orderness strategy and DISCARDED the result (DataStream
        // transformations return a NEW stream, they do not mutate the receiver), then
        // assigned again via the deprecated BoundedOutOfOrdernessTimestampExtractor with
        // 0s lateness. The dead call is removed and the effective 0s strategy is
        // expressed once with the modern WatermarkStrategy API.
        SingleOutputStreamOperator<String> timestamped = source.assignTimestampsAndWatermarks(
                WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                            @Override
                            public long extractTimestamp(String element, long recordTimestamp) {
                                // Event timestamp is the first comma-separated field.
                                return Long.parseLong(element.split(",")[0]);
                            }
                        }));

        // "key,value" line -> (key, value) tuple.
        SingleOutputStreamOperator<Tuple2<String, Integer>> mapStream =
                timestamped.map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String value) throws Exception {
                        String[] splits = value.split(",");
                        return Tuple2.of(splits[0].trim(), Integer.parseInt(splits[1].trim()));
                    }
                });

        // Tumbling 5s event-time windows: [0,5000) [5000,10000) ...
        // Records arriving after the watermark has passed their window end are not
        // dropped silently; they are emitted through this side-output tag instead.
        OutputTag<Tuple2<String, Integer>> outputTag = new OutputTag<Tuple2<String, Integer>>("late-data"){};
        SingleOutputStreamOperator<String> window = mapStream.keyBy(x -> x.f0)
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .sideOutputLateData(outputTag)
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    // Incremental pre-aggregation: only the running sum per key is kept in
                    // window state instead of buffering every element.
                    @Override
                    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
                        System.out.println("-----reduce invoked----" + value1.f0 + "==>" + (value1.f1 + value2.f1));
                        return Tuple2.of(value1.f0, value1.f1 + value2.f1);
                    }
                }, new ProcessWindowFunction<Tuple2<String, Integer>, String, String, TimeWindow>() {

                    // FastDateFormat is thread-safe, unlike SimpleDateFormat.
                    FastDateFormat format = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss");

                    @Override
                    public void process(String s, Context context, Iterable<Tuple2<String, Integer>> elements, Collector<String> out) throws Exception {
                        // With the reduce pre-aggregator, "elements" holds exactly one element.
                        for (Tuple2<String, Integer> element : elements) {
                            out.collect("[" + format.format(context.window().getStart()) + "==>" + format.format(context.window().getEnd()) + "], " + element.f0 + "==>" + element.f1);
                        }
                    }
                });
        window.print();
        DataStream<Tuple2<String, Integer>> sideOutput = window.getSideOutput(outputTag);
        sideOutput.printToErr();

        /**** Window operators (reference notes) *****/
        // Tumbling windows:
        // keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(5))).sum(1);
        // keyedStream.window(TumblingProcessingTimeWindows.of(Time.seconds(5)));
        // keyedStream.window(TumblingProcessingTimeWindows.of(Time.of(1, TimeUnit.MINUTES), Time.of(10, TimeUnit.SECONDS)));
        //
        // Sliding windows:
        // keyedStream.window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.MINUTES), Time.of(3, TimeUnit.MINUTES)));

        // reduce after a window:
        // Aggregates incrementally as far as the window policy allows. A tumbling time
        // window can fully pre-aggregate, so only one element per key is stored; a
        // sliding time window aggregates at the granularity of the slide interval, so a
        // few elements per key are stored (one per slide); custom windows may not be
        // able to aggregate incrementally and may need extra values in an aggregation tree.
        // keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(5)))
        //         .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
        //     @Override
        //     public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
        //         System.out.println("value1 = [" + value1 + "], value2 = [" + value2 + "]");
        //         return Tuple2.of(value1.f0, value1.f1+value2.f1);
        //     }
        // });

        // process after a window:
        // Applies the given window function to each window; it is invoked once per key.
        // NOTE: this requires buffering ALL elements of the window — no incremental
        // aggregation is possible with this variant.
        // SingleOutputStreamOperator<String> processOperator = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(5)))
        //         .process(new ProcessWindowFunction<Tuple2<String, Integer>, String, String, TimeWindow>() {
        //             @Override
        //             public void process(String s, ProcessWindowFunction<Tuple2<String, Integer>, String, String, TimeWindow>.Context context, Iterable<Tuple2<String, Integer>> elements, Collector<String> out) throws Exception {
        //                 System.out.println("----process invoked...----");
        //
        //                 int maxValue = Integer.MIN_VALUE;
        //                 for (Tuple2<String, Integer> element : elements) {
        //                     maxValue = Math.max(element.f1, maxValue);
        //                 }
        //
        //                 out.collect("当前窗口的最大值是:" + maxValue);
        //             }
        //         });
        //
        // processOperator.print();
        env.execute();
    }
}
