package com.hkbigdata.window;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * @author liuanbo
 * created 2023-05-06 17:15, contact: 2194550857@qq.com
 *
 * Data pipeline overview (warehouse layers: ods, dwd, dim, dwm, ads):
 * Maxwell/Canal/FlinkCDC -> Kafka -> Flink App -> layered Kafka topics -> MPP database
 * (ClickHouse/Doris/DataWorks) -> Spring Boot data API -> BI tools (FineReport, Microsoft, Baidu vendors).
 */
public class Flink03_Window_TimeSession {

    /**
     * Reads comma-separated tokens from a socket, keys the stream by token, and
     * sums a per-token count of 1 inside a processing-time session window with a
     * 5-second inactivity gap, printing each closed window's result to stdout.
     *
     * @param args optional overrides: {@code args[0]} = source host
     *             (default {@code "hadoop102"}), {@code args[1]} = source port
     *             (default {@code 9999}); omitting them preserves the original
     *             hard-coded behavior
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming environment; parallelism 1 keeps console
        //    output from interleaving across subtasks.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Allow host/port to be supplied on the command line; defaults match
        // the previously hard-coded values, so existing invocations still work.
        String host = args.length > 0 ? args[0] : "hadoop102";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 9999;

        // 2. Source: each socket line is "w1,w2,..."; emit (token, 1) per token.
        KeyedStream<Tuple2<String, Integer>, String> keyedStream = env.socketTextStream(host, port)
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        for (String token : value.split(",")) {
                            out.collect(new Tuple2<>(token, 1));
                        }
                    }
                }).keyBy(data -> data.f0);

        // 3. Session window on processing time: a key's window closes once no
        //    element for that key arrives within the 5-second gap.
        WindowedStream<Tuple2<String, Integer>, String, TimeWindow> window =
                keyedStream.window(ProcessingTimeSessionWindows.withGap(Time.seconds(5L)));

        // 4. Sum the count field (tuple position 1) per window and print.
        window.sum(1).print();

        env.execute();
    }
}
