package com.hkbigdata.window;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.sql.Timestamp;

/**
 * Demonstrates an incremental window aggregation: a pre-aggregating
 * {@code AggregateFunction} combined with a {@code WindowFunction} on a
 * tumbling processing-time window.
 *
 * @author liuanbo (2194550857@qq.com)
 * @since 2023-04-23 15:49
 */
public class Flink06_Window_Agg {
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment; parallelism 1 keeps
        //    the console output in a single, readable sequence.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Ingest raw text lines from a socket source.
        DataStreamSource<String> lineStream = env.socketTextStream("hadoop102", 9999);

        // 3. Tokenize each line and emit one (word, 1) pair per token.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordPairs =
                lineStream.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String line, Collector<Tuple2<String, Integer>> collector) throws Exception {
                        for (String token : line.split(" ")) {
                            collector.collect(new Tuple2<>(token, 1));
                        }
                    }
                });

        // 4. Partition the stream by the word itself.
        KeyedStream<Tuple2<String, Integer>, String> keyedPairs = wordPairs.keyBy(pair -> pair.f0);

        // 5. Assign 8-second tumbling windows driven by processing time.
        WindowedStream<Tuple2<String, Integer>, String, TimeWindow> windowedPairs =
                keyedPairs.window(TumblingProcessingTimeWindows.of(Time.seconds(8)));

        // 6. Incremental aggregation: MyAgg folds each (word, 1) pair into an
        //    Integer count as elements arrive; when the window fires, that
        //    single count is handed to MyWindowFunc, which decorates it with
        //    window/key metadata and emits the final Tuple2.
        SingleOutputStreamOperator<Tuple2<String, Integer>> windowCounts =
                windowedPairs.aggregate(new MyAgg(), new MyWindowFunc());

        windowCounts.print();

        env.execute();

    }

    /**
     * Incremental counter: accumulates the {@code f1} field (always 1 here) of
     * each incoming pair into a running Integer total.
     */
    public static class MyAgg implements AggregateFunction<Tuple2<String, Integer>, Integer, Integer> {
        /** Start every window's count at zero. */
        @Override
        public Integer createAccumulator() {
            return 0;
        }

        /** Fold one element into the running count. */
        @Override
        public Integer add(Tuple2<String, Integer> element, Integer runningCount) {
            return runningCount + element.f1;
        }

        /** The accumulator itself is the final result. */
        @Override
        public Integer getResult(Integer runningCount) {
            return runningCount;
        }

        /** Combine two partial counts; only invoked for session windows. */
        @Override
        public Integer merge(Integer left, Integer right) {
            return left + right;
        }
    }

    /**
     * Attaches window-start and key metadata to the pre-aggregated count.
     * Because it runs downstream of an AggregateFunction, the iterable holds
     * exactly one element: the window's final count.
     */
    public static class MyWindowFunc implements WindowFunction<Integer, Tuple2<String, Integer>, String, TimeWindow> {

        @Override
        public void apply(String key, TimeWindow window, Iterable<Integer> input, Collector<Tuple2<String, Integer>> out) throws Exception {
            // Single pre-aggregated count produced by MyAgg for this window.
            Integer count = input.iterator().next();
            out.collect(new Tuple2<>(new Timestamp(window.getStart()) + "key:" + key, count));
        }
    }
}
