package cn.itcast.flink.join;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Random;

/**
 * Author itcast
 * Date 2022/1/14 10:07
 *
 * Session-window demo.
 * Requirement 1: a processing-time session window with a 5 s gap, summing all records globally.
 * Requirement 2: a processing-time session window with a 5 s gap, summing values per key group.
 */
public class SessionWindowDemo {
    public static void main(String[] args) throws Exception {
        // Obtain the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism so the console output is easy to follow.
        env.setParallelism(1);
        // Source emits Tuple2<key, value> at random intervals.
        DataStreamSource<Tuple2<String, Integer>> source = env.addSource(new GenerateRandomNumRandomSecond());
        // Requirement 1 (kept for reference): non-keyed session window over the whole stream.
        /*SingleOutputStreamOperator<Tuple2<String, Integer>> result1 = source.windowAll(ProcessingTimeSessionWindows.withGap(Time.seconds(5)))
                .sum(1);*/
        // Requirement 2: key by f0, apply a 5 s-gap session window, sum field f1 per key.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result2 = source.keyBy(t -> t.f0)
                .window(ProcessingTimeSessionWindows.withGap(Time.seconds(5)))
                .sum(1);
        // Print results to stdout.
        result2.print();
        // Trigger job execution.
        env.execute();
    }

    /**
     * Custom source: every random interval (1–7 s, never exactly the 5 s session gap)
     * emits a Tuple2 whose key is one of "hadoop"/"spark"/"flink" and whose value is 1–9.
     */
    public static class GenerateRandomNumRandomSecond implements SourceFunction<Tuple2<String, Integer>> {
        // volatile so cancel() (invoked from another thread) is visible to run().
        private volatile boolean isRunning = true;
        private final Random random = new Random();
        private final List<String> keyList = Arrays.asList("hadoop", "spark", "flink");
        // SimpleDateFormat is not thread-safe in general, but it is confined to this
        // source instance, which runs single-threaded inside run().
        private final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        @Override
        public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
            while (this.isRunning) {
                String key = keyList.get(random.nextInt(keyList.size()));
                // Fix: nextInt(9) yields 0–8; +1 produces the documented 1–9 range.
                Tuple2<String, Integer> value = Tuple2.of(key, random.nextInt(9) + 1);
                ctx.collect(value);
                // Pick a sleep of 1–7 s (fix: the original nextInt(7) could yield 0 ms),
                // re-rolling 5 s so the pause is never exactly equal to the session gap —
                // that would make the window-close behavior ambiguous for the demo.
                long sleepTime = 5000L;
                while (sleepTime == 5000L) {
                    sleepTime = (random.nextInt(7) + 1) * 1000L;
                }
                System.out.println(sdf.format(new Date()) + ":---will sleep " + sleepTime + " ms---: " + value);
                Thread.sleep(sleepTime);
            }
        }

        @Override
        public void cancel() {
            // Flip the flag so run()'s loop exits after the current sleep.
            this.isRunning = false;
        }
    }
}
