package com.atguigu.chapter07;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/6/11 9:03
 */
/**
 * Demonstrates incremental window aggregation: an {@link AggregateFunction}
 * computes a per-key average over 5-second tumbling processing-time windows,
 * and a {@link ProcessWindowFunction} enriches the result with window metadata.
 *
 * <p>Input lines on the socket are expected as {@code "<key> <longValue>"};
 * malformed lines are skipped so a single bad record cannot kill the job.
 */
public class Flink07_Window_Aggregate {
    public static void main(String[] args) throws Exception {
        // Expose the local web UI on a fixed port instead of a random one.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(2);

        env
            .socketTextStream("hadoop162", 9999)
            .flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
                @Override
                public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
                    // Expected format: "<key> <longValue>". Drop malformed lines
                    // instead of throwing: an uncaught exception here would fail
                    // the whole streaming job on one bad input line.
                    String[] data = value.split(" ");
                    if (data.length < 2) {
                        return;
                    }
                    try {
                        out.collect(Tuple2.of(data[0], Long.valueOf(data[1])));
                    } catch (NumberFormatException ignored) {
                        // second field is not a long — skip this line
                    }
                }
            })
            .keyBy(t -> t.f0)
            .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
            .aggregate(
                // Accumulator is (sum, count); the final result is the average.
                new AggregateFunction<Tuple2<String, Long>, Tuple2<Long, Integer>, Double>() {

                    // Create a fresh accumulator for a new window.
                    @Override
                    public Tuple2<Long, Integer> createAccumulator() {
                        System.out.println("Flink07_Window_Aggregate.createAccumulator");
                        return Tuple2.of(0L, 0);
                    }

                    // Fold one incoming element into the accumulator
                    // (called incrementally as each element arrives).
                    @Override
                    public Tuple2<Long, Integer> add(Tuple2<String, Long> value,
                                                     Tuple2<Long, Integer> acc) {
                        System.out.println("Flink07_Window_Aggregate.add");
                        return Tuple2.of(acc.f0 + value.f1, acc.f1 + 1);
                    }

                    // Produce the final result when the window fires.
                    @Override
                    public Double getResult(Tuple2<Long, Integer> acc) {
                        System.out.println("Flink07_Window_Aggregate.getResult");
                        // acc.f1 > 0 here: getResult is only called after at
                        // least one add(); double division avoids int truncation.
                        return acc.f0 * 1.0 / acc.f1;
                    }

                    // Merge two accumulators: only invoked for merging windows
                    // (session windows); tumbling windows never call this.
                    @Override
                    public Tuple2<Long, Integer> merge(Tuple2<Long, Integer> a, Tuple2<Long, Integer> b) {
                        System.out.println("Flink07_Window_Aggregate.merge");
                        return Tuple2.of(a.f0 + b.f0, a.f1 + b.f1);
                    }
                },
                // Receives the single pre-aggregated value per window and adds
                // key + window metadata to the emitted string.
                new ProcessWindowFunction<Double, String, String, TimeWindow>() {
                    @Override
                    public void process(String key,
                                        Context ctx,
                                        Iterable<Double> elements,
                                        Collector<String> out) throws Exception {
                        // With incremental aggregation the iterable holds
                        // exactly one element: the AggregateFunction result.
                        Double avg = elements.iterator().next();
                        out.collect("key=" + key + ", window=" + ctx.window() + ", result=" + avg);
                    }
                }
            )
            .print();

        // Let failures propagate instead of swallowing them with
        // printStackTrace(); a failed execute() should terminate the program.
        env.execute();
    }
}
