package com.atguigu.flink.wordcount;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2023/3/31
 *
 *      Unified stream/batch processing.
 *              Developers only need to master the Stream (DataStream) API.
 *              With a simple parameter switch, the same program can run in
 *              batch mode (read all input at once, then compute).
 *
 */
public class Demo4_StreamBatchOneDemo
{
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        /*
                The default runtime mode is RuntimeExecutionMode.STREAMING.

                        RuntimeExecutionMode.BATCH requires every source to be a bounded stream!
                        RuntimeExecutionMode.AUTOMATIC: if all sources are bounded, run as BATCH;
                        if even one source is unbounded, run as STREAMING.

                 In practice this is usually NOT hard-coded; it is supplied as a parameter
                 when the packaged job is submitted to the cluster:
                    flink run -Dexecution.runtime-mode=BATCH
         */
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);

        env.setParallelism(1);

        // DataStreamSource<String> source = env.socketTextStream("hadoop102", 8888);
        DataStreamSource<String> source = env.readTextFile("data/words.txt");

        // Tokenize each input line into (word, 1) pairs.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordPairs = source.flatMap(
            new FlatMapFunction<String, Tuple2<String, Integer>>() {
                @Override
                public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                    for (String token : value.split(" ")) {
                        out.collect(Tuple2.of(token, 1));
                    }
                }
            });

        // Group by the word itself (tuple field f0), sum the counts (field index 1),
        // and print every result update to stdout.
        wordPairs
            .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                @Override
                public String getKey(Tuple2<String, Integer> pair) throws Exception {
                    return pair.f0;
                }
            })
            .sum(1)
            .print();

        env.execute();
    }
}
