package com.atguigu.datastream.test.day05;

import com.atguigu.datastream.bean.Event;
import com.atguigu.datastream.test.day03.Flink_05_Source_useDefault;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.util.HashSet;

/**
 * ClassName: Flink04_ProcessTimeWindow_AggFun
 * Package: com.atguigu.test.day05
 * Description:
 *
 * @Author ChenJun
 * @Create 2023/4/11 23:08
 * @Version 1.0
 */
public class Flink04_ProcessTimeWindow_AggFun {
    public static void main(String[] args) throws Exception {

        //1. 创建流的执行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);

        //2. 从端口读取数据
        DataStreamSource<String> streamSource = env.socketTextStream("localhost", 9999);

        //3. 通过自定义数据源获取数据
        DataStreamSource<Event> dataStreamSource = env.addSource(new Flink_05_Source_useDefault.ClickSource());


        //4. 将数据聚合达到同一个分区
        KeyedStream<Event, String> keyedStream = dataStreamSource.keyBy(new KeySelector<Event, String>() {
            @Override
            public String getKey(Event value) throws Exception {
                return "key";
            }
        });

        //5. 开启一个基于处理时间的滚动窗口，每计算一次5秒内的用户粘度
        WindowedStream<Event, String, TimeWindow> window = keyedStream.window(TumblingProcessingTimeWindows.of(Time.seconds(5)));


        // TODO 调用增量聚合函数AggFun计算用户粘度（pv/uv）
        SingleOutputStreamOperator<Double> streamOperator = window.aggregate(new AggregateFunction<Event, Tuple2<Integer, Integer>, Double>() {
            //一个并行度的一个窗口有一个实例
            HashSet<String> userSet = new HashSet<>();

            //创建累加器 （初始化累加器） 累加器中 第一个元素代表的是pv 第二个元素代表的是uv
            @Override
            public Tuple2<Integer, Integer> createAccumulator() {
                System.out.println("创建累加器");
                return Tuple2.of(0, 0);
            }

            /**
             *
             * @param value 输入的数据
             * @param accumulator 累加器
             * @return
             */
            @Override
            public Tuple2<Integer, Integer> add(Event value, Tuple2<Integer, Integer> accumulator) {
                System.out.println("累加操作");
                userSet.add(value.user);
                return Tuple2.of(accumulator.f0 + 1, userSet.size());
            }

            @Override
            public Double getResult(Tuple2<Integer, Integer> accumulator) {
                System.out.println("累加结果");
                return accumulator.f0 * 1D / accumulator.f1;
            }

            @Override
            public Tuple2<Integer, Integer> merge(Tuple2<Integer, Integer> a, Tuple2<Integer, Integer> b) {
                return Tuple2.of(a.f0 + b.f0, a.f1 + b.f1);
            }
        });

        streamOperator.print();
        env.execute();
    }
}
