package com.atguigu.flink.wordcount;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * Created by Smexy on 2023/2/23
 *      Batch processing:  DataSet, DataStream
 *      Stream processing: DataStream
 *
 *      Since Flink 1.11 the streaming API is the recommended one; it also
 *      provides a batch execution mode ("unified stream/batch processing").
 *      The program is written against the streaming API and can be switched
 *      to batch execution via a runtime-mode setting.
 *
 *      See courseware section 5.1.2.
 *
 */
public class Demo4_StreamBatchOne
{
    /**
     * Runs a word-count job with the unified streaming API in BATCH execution
     * mode: reads a text file, splits lines into words, and prints per-word
     * counts.
     *
     * @param args optional; {@code args[0]} may override the input file path
     *             (defaults to {@code data/words.txt})
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);
        // Select the runtime mode. BATCH requires a bounded source
        // (a file is bounded; a socket stream is not).
        // env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);

        // Allow the input path to be supplied on the command line; fall back
        // to the original hard-coded demo file.
        String inputPath = args.length > 0 ? args[0] : "data/words.txt";
        //DataStreamSource<String> source = env.socketTextStream("hadoop103",8888);
        DataStreamSource<String> source = env.readTextFile(inputPath);

        // Split each line on spaces and emit a (word, 1) pair per word.
        // An anonymous class (rather than a lambda) lets Flink recover the
        // Tuple2 output type despite Java's generic type erasure.
        SingleOutputStreamOperator<Tuple2<String, Integer>> ds1 = source
            .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>()
            {
                @Override
                public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1));
                    }
                }
            });

        // Key by the word (field f0) and sum the counts (positional field 1).
        ds1
            .keyBy(new KeySelector<Tuple2<String, Integer>, String>()
            {
                @Override
                public String getKey(Tuple2<String, Integer> value) throws Exception {
                    return value.f0;
                }
            })
            .sum(1)
            .print();

        env.execute();

    }
}
