package com.atguigu.day03;

import com.atguigu.utils.IntSource;
import com.atguigu.utils.IntStatistic;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

// Rate limiting: emit the running statistics at most once per 10 seconds via a processing-time timer
public class Example10 {
    /**
     * Entry point: builds and runs the streaming job.
     *
     * Pipeline: IntSource -> keyBy (single constant key) -> Statistic (throttled
     * aggregation) -> print. Parallelism is fixed to 1 for deterministic console output.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // All records share the key "integer", so the whole stream is aggregated together.
        env.addSource(new IntSource())
                .keyBy(r -> "integer")
                .process(new Statistic())
                .print();

        // Submit the job; blocks until the job terminates.
        env.execute();
    }

    /**
     * Keyed process function that maintains running statistics (min, max, sum, count, avg)
     * per key in keyed state, and throttles output: results are emitted only when a
     * processing-time timer fires, at most once every 10 seconds per key.
     */
    public static class Statistic extends KeyedProcessFunction<String, Integer, IntStatistic> {
        // Running statistics for the current key; null until the key's first element arrives.
        private ValueState<IntStatistic> stats;

        // Timer marker: non-null means a processing-time timer is already pending for this key.
        private ValueState<Boolean> timerPending;

        @Override
        public void open(Configuration parameters) throws Exception {
            // State handles are created once per task; the descriptor name must be
            // unique within the operator (the runtime keys state by it).
            stats = getRuntimeContext().getState(
                    new ValueStateDescriptor<IntStatistic>(
                            "accumulator",
                            Types.POJO(IntStatistic.class)
                    )
            );

            timerPending = getRuntimeContext().getState(
                    new ValueStateDescriptor<Boolean>(
                            "flag",
                            Types.BOOLEAN
                    )
            );
        }

        @Override
        public void processElement(Integer in, Context ctx, Collector<IntStatistic> out) throws Exception {
            // Reads/writes below always refer to the state slot of the current element's key.
            IntStatistic prev = stats.value();
            if (prev == null) {
                // First element for this key: seed every statistic from the element itself.
                stats.update(new IntStatistic(in, in, in, 1, in));
            } else {
                // Fold the new element into the existing accumulator.
                // NOTE(review): (in + prev.sum) / (1 + prev.count) looks like integer
                // division if IntStatistic's fields are ints — confirm the avg field's type.
                stats.update(new IntStatistic(
                        Math.min(in, prev.min),
                        Math.max(in, prev.max),
                        in + prev.sum,
                        1 + prev.count,
                        (in + prev.sum) / (1 + prev.count)
                ));
            }

            // Throttle: only register a timer if none is pending for this key.
            if (timerPending.value() == null) {
                long fireAt = ctx.timerService().currentProcessingTime() + 10 * 1000L;
                ctx.timerService().registerProcessingTimeTimer(fireAt);
                timerPending.update(true);
            }
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<IntStatistic> out) throws Exception {
            // Emit the current statistics downstream when the timer fires.
            out.collect(stats.value());
            // Clear the marker so the next element can schedule a fresh timer.
            timerPending.clear();
        }
    }
}
