package com.abyss.window;


import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.windowing.assigners.*;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Random;

/**
 * Demo of processing-time session windows.
 * Sessions close after a 5-second gap with no incoming data.
 *
 * Case: a custom source emits one record every few (random) seconds.
 * Each record is a key/value pair: the key is one of "hadoop", "spark",
 * "flink"; the value is a random number.
 * Requirement 1: a session window with a 5-second gap summing the values of
 *                all records (no keying).
 * Requirement 2: a session window with a 5-second gap summing the values
 *                per key after grouping by key.
 */
public class TimeSessionWindowDemo {
    // NOTE(review): SimpleDateFormat is not thread-safe. Here it is used at
    // job-graph build time (print labels in main) and from the source thread
    // at runtime, which do not overlap in practice for this demo, but
    // java.time.DateTimeFormatter would be the safe modern choice.
    final private static SimpleDateFormat sdf = new SimpleDateFormat("mm:ss.SSS");

    public static void main(String[] args) throws Exception {
        // Execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: emits (key, value) tuples at random intervals (see below).
        DataStreamSource<Tuple2<String, Integer>> source = env.addSource(new GenerateRandomNumRandomSecond());

        // Requirement 1: session window, 5-second gap, no keyBy — sums field 1
        // (the value) over every record that falls into the same session.
        SingleOutputStreamOperator<Tuple2<String, Integer>> sumAll = source
                .windowAll(ProcessingTimeSessionWindows.withGap(Time.seconds(5))).sum(1);

//        sumAll.print(sdf.format(new Date()) + "|sum print>>>");

        // Requirement 2: session window, 5-second gap, keyed by field 0
        // (the word) — sums field 1 per key within each session.
        SingleOutputStreamOperator<Tuple2<String, Integer>> sumEachKey = source
                .keyBy(0).window(ProcessingTimeSessionWindows.withGap(Time.seconds(5))).sum(1);

        sumEachKey.print(sdf.format(new Date()) + "|each key print>>>");

        env.execute();
    }

    /**
     * Custom source: every random interval of 1~7 whole seconds (never exactly
     * 5, so a pause never coincides exactly with the session gap) emits one
     * (k, v) tuple where k is one of "hadoop"/"spark"/"flink" and v is a
     * random int in [0, 9).
     */
    public static class GenerateRandomNumRandomSecond implements SourceFunction<Tuple2<String, Integer>> {
        // volatile: cancel() is invoked from a different thread than run();
        // without it the run loop may never observe the flag flipping to false.
        private volatile boolean isRun = true;
        private final Random random = new Random();
        private final List<String> keyList = Arrays.asList("hadoop", "spark", "flink");

        @Override
        public void run(SourceContext<Tuple2<String, Integer>> ctx) throws Exception {
            while (this.isRun) {
                String key = keyList.get(random.nextInt(3));
                Tuple2<String, Integer> value = Tuple2.of(key, random.nextInt(9));
                ctx.collect(value);
                // Sleep 1~7 whole seconds, re-rolling a draw that equals the
                // 5-second session gap. (The original drew nextInt(7) * 1000L,
                // which could yield 0 ms — rapid-fire emits — and could never
                // actually reach 7 seconds.)
                long sleepTime = 5000L;
                while (sleepTime == 5000L) {
                    sleepTime = (random.nextInt(7) + 1) * 1000L;
                }
                System.out.println(sdf.format(new Date()) + ":---will sleep " + sleepTime + " ms---: " + value);
                Thread.sleep(sleepTime);
            }
        }

        @Override
        public void cancel() {
            this.isRun = false;
        }
    }
}