package com.atguigu.datastream.day05;

import com.atguigu.datastream.bean.Event;
import com.atguigu.datastream.test.day03.Flink_05_Source_useDefault;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.util.HashSet;

/**
 * ClassName: Flink04_ProcessTimeWindow_AggFun
 * Package: com.atguigu.day05
 * Description:
 *           聚合函数
 * @Author ChenJun
 * @Create 2023/4/11 16:42
 * @Version 1.0
 */
public class Flink04_ProcessTimeWindow_AggFun {
    public static void main(String[] args) throws Exception {

        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);


        // 2. Read click events from the custom source.
        DataStreamSource<Event> streamSource = env.addSource(new Flink_05_Source_useDefault.ClickSource());

        // 3. Route every event to the same constant key so all data lands in one partition.
        KeyedStream<Event, String> keyedStream = streamSource.keyBy(new KeySelector<Event, String>() {
            @Override
            public String getKey(Event event) throws Exception {
                return "key";
            }
        });

        // 4. 5-second tumbling window on processing time; each firing computes
        //    user "stickiness" (pv / uv) for that window.
        WindowedStream<Event, String, TimeWindow> window = keyedStream.window(TumblingProcessingTimeWindows.of(Time.seconds(5)));

        // Full-window function: report how many elements each window contained.
        SingleOutputStreamOperator<String> process = window.process(new ProcessWindowFunction<Event, String, String, TimeWindow>() {
            @Override
            public void process(String s, ProcessWindowFunction<Event, String, String, TimeWindow>.Context context, Iterable<Event> elements, Collector<String> out) throws Exception {
                String msg =
                        "窗口: [" + context.window().getStart() / 1000 + "," + context.window().getEnd() / 1000 + ") 一共有 "
                                + elements.spliterator().estimateSize() + "条数据 ";
                out.collect(msg);
            }
        });
        process.print();

        // Incremental aggregation computing stickiness (pv / uv).
        //
        // BUG FIX: the original version kept a HashSet as an instance field of the
        // AggregateFunction. Flink creates ONE function instance per subtask — not
        // one per window — so the set was never reset and the uv count leaked
        // across windows, skewing every result after the first window (it was also
        // invisible to checkpointing). All per-window state must live inside the
        // accumulator: f0 = pv counter, f1 = distinct users seen in THIS window.
        SingleOutputStreamOperator<Double> result = window.aggregate(new AggregateFunction<Event, Tuple2<Integer, HashSet<String>>, Double>() {

            /**
             * Creates a fresh accumulator for each new window:
             * pv = 0 and an empty per-window set of users.
             */
            @Override
            public Tuple2<Integer, HashSet<String>> createAccumulator() {

                System.out.println("创建累加器");
                return Tuple2.of(0, new HashSet<String>());
            }

            /**
             * Accumulates one event: increments pv and records the user in the
             * window-scoped set (uv = set size).
             *
             * @param event the incoming element
             * @param acc   the window's accumulator
             * @return the updated accumulator
             */
            @Override
            public Tuple2<Integer, HashSet<String>> add(Event event, Tuple2<Integer, HashSet<String>> acc) {
                System.out.println("累加操作");
                acc.f1.add(event.user);
                return Tuple2.of(acc.f0 + 1, acc.f1);
            }

            /**
             * Emits the window result: stickiness = pv / uv.
             * A window only fires with at least one element, so uv >= 1.
             *
             * @param acc the final accumulator for the window
             */
            @Override
            public Double getResult(Tuple2<Integer, HashSet<String>> acc) {
                System.out.println("获取结果");
                return acc.f0 * 1D / acc.f1.size();
            }

            /**
             * Merges two accumulators when windows are merged (only invoked for
             * merging windows, e.g. event-time session windows): sums pv and
             * unions the distinct-user sets so uv stays exact.
             *
             * @param a first accumulator (reused as the merge target)
             * @param b second accumulator
             * @return the merged accumulator
             */
            @Override
            public Tuple2<Integer, HashSet<String>> merge(Tuple2<Integer, HashSet<String>> a, Tuple2<Integer, HashSet<String>> b) {
                System.out.println("合并累加器");
                a.f1.addAll(b.f1);
                return Tuple2.of(a.f0 + b.f0, a.f1);
            }
        });

        result.print();

        env.execute();


    }
}
