package edu.nepu.flink.api.ontimer;

import edu.nepu.flink.api.bean.WaterSensor;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.*;

/**
 * Top-N demo: counts records per water level ({@code vc}) in sliding event-time
 * windows, then uses a keyed process function with an event-time timer to emit
 * the N most frequent levels once all per-vc results for a window have arrived.
 *
 * Pipeline: socket source → WaterSensor → watermarks (2s out-of-orderness)
 * → keyBy(vc) → sliding window (10s size / 5s slide) → per-vc count
 * → keyBy(windowEnd) → timer-driven Top-N output.
 *
 * @Date 2024/3/1 21:19
 * @Created by chenshuaijun
 */
public class TopNOnTimer {

    /** How many of the most frequent vc values to emit per window. */
    private static final int TOP_N = 2;

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);

        // Parse "id,ts,vc" lines into WaterSensor; ts is in seconds, so scale to
        // millis for the event-time timestamp. Watermarks tolerate 2s lateness.
        SingleOutputStreamOperator<WaterSensor> sourceStream = env.socketTextStream("hadoop102", 9999)
                .map(new MapFunction<String, WaterSensor>() {
                    @Override
                    public WaterSensor map(String value) throws Exception {
                        String[] split = value.split(",");
                        return new WaterSensor(split[0], Long.parseLong(split[1]), Integer.parseInt(split[2]));
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<WaterSensor>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<WaterSensor>() {
                            @Override
                            public long extractTimestamp(WaterSensor element, long recordTimestamp) {
                                return element.getTs() * 1000;
                            }
                        }));

        // Per (vc, window): incrementally count records with an AggregateFunction,
        // then attach the window end so the downstream operator can group results
        // belonging to the same window. Output tuples are (windowEnd, vc, count).
        SingleOutputStreamOperator<Tuple3<Long, Integer, Integer>> aggregateStream = sourceStream
                .keyBy(WaterSensor::getVc)
                .window(SlidingEventTimeWindows.of(Time.seconds(10), Time.seconds(5)))
                .aggregate(new AggregateFunction<WaterSensor, Integer, Integer>() {
                    @Override
                    public Integer createAccumulator() {
                        return 0;
                    }

                    @Override
                    public Integer add(WaterSensor value, Integer accumulator) {
                        return accumulator + 1;
                    }

                    @Override
                    public Integer getResult(Integer accumulator) {
                        return accumulator;
                    }

                    @Override
                    public Integer merge(Integer a, Integer b) {
                        // BUGFIX: was `return null`. Merging two partial counts must
                        // sum them; merge is only invoked for merging windows (e.g.
                        // session windows), but returning null would NPE there.
                        return a + b;
                    }
                }, new ProcessWindowFunction<Integer, Tuple3<Long, Integer, Integer>, Integer, TimeWindow>() {
                    @Override
                    public void process(Integer key, ProcessWindowFunction<Integer, Tuple3<Long, Integer, Integer>, Integer, TimeWindow>.Context context, Iterable<Integer> elements, Collector<Tuple3<Long, Integer, Integer>> out) throws Exception {
                        // `elements` contains exactly one value: the pre-aggregated count.
                        long end = context.window().getEnd();
                        out.collect(Tuple3.of(end, key, elements.iterator().next()));
                    }
                });
        /*
         * NOTE: after keyBy, one instance of the process function serves a whole
         * subtask, so the plain HashMap below is SUBTASK-scoped, not key-scoped —
         * every key routed to this subtask shares the same map (timers, by
         * contrast, ARE key-scoped). See edu.nepu.flink.api.state.ErrorStateDemo
         * for a demonstration; keyed MapState would be the state-backed,
         * fault-tolerant alternative.
         */
        aggregateStream.keyBy(val -> val.f0).process(new KeyedProcessFunction<Long, Tuple3<Long, Integer, Integer>, String>() {

            // windowEnd -> (vc, count) results buffered for that window.
            // Entries are removed as soon as the window's timer fires.
            Map<Long, List<Tuple2<Integer, Integer>>> map;

            @Override
            public void open(Configuration parameters) throws Exception {
                map = new HashMap<>();
            }

            @Override
            public void processElement(Tuple3<Long, Integer, Integer> value, KeyedProcessFunction<Long, Tuple3<Long, Integer, Integer>, String>.Context ctx, Collector<String> out) throws Exception {
                // Buffer this window's (vc, count) and fire 1ms past the window end,
                // by which time every per-vc result of the window has been received.
                map.computeIfAbsent(value.f0, k -> new ArrayList<>()).add(Tuple2.of(value.f1, value.f2));
                ctx.timerService().registerEventTimeTimer(value.f0 + 1);
            }

            @Override
            public void onTimer(long timestamp, KeyedProcessFunction<Long, Tuple3<Long, Integer, Integer>, String>.OnTimerContext ctx, Collector<String> out) throws Exception {
                Long currentKey = ctx.getCurrentKey();
                // remove() both fetches and clears the window's buffer in one step.
                List<Tuple2<Integer, Integer>> tuple2s = map.remove(currentKey);
                if (tuple2s == null) {
                    return; // nothing buffered for this window (defensive guard)
                }
                // BUGFIX: rank by count (f1) descending before slicing — the
                // original emitted the first N tuples in arrival order, which is
                // not a Top-N at all.
                tuple2s.sort((a, b) -> Integer.compare(b.f1, a.f1));
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < Math.min(TOP_N, tuple2s.size()); i++) {
                    sb.append("************hot").append(i + 1).append("***********\n");
                    sb.append(tuple2s.get(i)).append("\n");
                }
                out.collect(sb.toString());
            }
        }).print();

        env.execute();
    }
}
