package com.chencong.transform;

import com.chencong.env.FlinkTableEnv;
import com.chencong.udf.MyFlatMapFunc;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Bounded-stream word count: reads a text file as a (bounded) DataStream,
 * splits each line into words, groups by word, and prints running counts.
 *
 * <p>Usage: an optional first CLI argument overrides the input file path;
 * otherwise the original default path is used.
 *
 * @author chencong
 * @since 2021-08-09
 */
public class Flink02BoundedStreamingWordCount {

    /** Input path used when no command-line argument is supplied (original hard-coded value). */
    private static final String DEFAULT_INPUT_PATH =
            "/Users/chencong/IdeaProjects/BigData_Learning/flink/input";

    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment (via the project helper).
        StreamExecutionEnvironment env = FlinkTableEnv.getStreamTableEnvironment();
        // Parallelism 1 keeps the printed output in a single, deterministic task.
        env.setParallelism(1);

        // Allow the input path to be overridden from the command line;
        // fall back to the original hard-coded path for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT_PATH;

        // 2. Read the input file as a bounded stream of lines.
        DataStreamSource<String> inputData = env.readTextFile(inputPath);

        // 3. Split each line into individual words (delegated to the project UDF).
        SingleOutputStreamOperator<String> words = inputData.flatMap(new MyFlatMapFunc());

        // Map every word to a (word, 1) tuple. An anonymous MapFunction is used
        // rather than a lambda so Flink can infer the Tuple2 generic types.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndOne =
                words.map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String word) throws Exception {
                        return Tuple2.of(word, 1);
                    }
                });

        // 4. Group the stream by the word (tuple field f0).
        KeyedStream<Tuple2<String, Integer>, String> keyedByWord =
                wordAndOne.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> tuple) throws Exception {
                        return tuple.f0;
                    }
                });

        // 5. Sum the counts (positional field 1) per key.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = keyedByWord.sum(1);

        // 6. Print the running counts to stdout.
        result.print();

        // 7. Trigger job execution — streaming pipelines are lazy until execute().
        env.execute("Bounded Streaming Word Count");
    }
}
