package cn._51doit.flink.day10;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.RichWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * Incrementally aggregates the data inside each window; when the window fires,
 * combines the window's result with the historical total kept in keyed state,
 * then emits the running total.
 */
public class WindowAggregateWithHistoryDemo {

    public static void main(String[] args) throws Exception {

        // 1. Create the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Create a DataStream from a socket source (one line of text per record).
        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        // 3. Transformations: split each line into words.
        SingleOutputStreamOperator<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String line, Collector<String> collector) throws Exception {
                String[] words = line.split(" ");
                for (String word : words) {
                    collector.collect(word); // emit each word downstream
                }
            }
        });

        // Pair every word with an initial count of 1.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne = words.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String word) throws Exception {
                return Tuple2.of(word, 1);
            }
        });

        // Key the stream by the word itself.
        KeyedStream<Tuple2<String, Integer>, String> keyed = wordAndOne.keyBy(t -> t.f0);

        // After keyBy, assign 5-second tumbling processing-time windows.
        WindowedStream<Tuple2<String, Integer>, String, TimeWindow> windowedStream = keyed.window(TumblingProcessingTimeWindows.of(Time.seconds(5)));

        // Incrementally aggregate within the window (MyAggregateFunction), then
        // combine the fired window's result with keyed history (MyProcessWindowFunction).
        SingleOutputStreamOperator<Tuple2<String, Integer>> res = windowedStream.aggregate(new MyAggregateFunction(), new MyProcessWindowFunction());

        res.print();
        // Alternatively, sink the results to Redis or MySQL.

        env.execute();
    }

    /**
     * Incremental per-window aggregator: sums the {@code f1} counts of
     * {@code (word, count)} pairs. Input = Tuple2, accumulator = Integer,
     * output = Integer (the window's partial sum).
     */
    private static class MyAggregateFunction implements AggregateFunction<Tuple2<String, Integer>, Integer, Integer> {

        /** Creates the initial accumulator (count starts at 0). */
        @Override
        public Integer createAccumulator() {
            return 0;
        }

        /** Called once per input element with the same key; adds its count. */
        @Override
        public Integer add(Tuple2<String, Integer> input, Integer accumulator) {
            return accumulator + input.f1;
        }

        /** Returns the window's result when the window fires. */
        @Override
        public Integer getResult(Integer accumulator) {
            return accumulator;
        }

        /**
         * Merges two partial accumulators (invoked for merging windows, e.g.
         * session windows). BUG FIX: this previously returned {@code null},
         * which would cause a NullPointerException if windows were ever
         * merged; the correct merge of two partial sums is their sum.
         */
        @Override
        public Integer merge(Integer a, Integer b) {
            return a + b;
        }
    }

    /**
     * Runs after the window fires: adds the window's aggregated count to the
     * historical total stored in keyed {@link ValueState}, updates the state,
     * and emits {@code (word, runningTotal)}.
     */
    private static class MyProcessWindowFunction extends ProcessWindowFunction<Integer, Tuple2<String, Integer>, String, TimeWindow> {

        // Per-key running total across all past windows; restored from state backend.
        private transient ValueState<Integer> countState;

        @Override
        public void open(Configuration parameters) throws Exception {
            ValueStateDescriptor<Integer> stateDescriptor = new ValueStateDescriptor<>("history-count", Integer.class);
            countState = getRuntimeContext().getState(stateDescriptor);
        }

        /**
         * Invoked once per key when the window fires. {@code elements} holds
         * exactly one value: the pre-aggregated result from
         * {@link MyAggregateFunction}.
         */
        @Override
        public void process(String key, Context context, Iterable<Integer> elements, Collector<Tuple2<String, Integer>> out) throws Exception {
            // The incremental aggregator guarantees a single element per window.
            Integer current = elements.iterator().next();

            Integer history = countState.value();
            if (history == null) { // first window ever seen for this key
                history = 0;
            }
            current += history;
            countState.update(current);
            out.collect(Tuple2.of(key, current));
        }
    }
}
