package com.atguigu.day01;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

public class Flink01_Batch_WordCount {
    public static void main(String[] args) throws Exception {
        // 1. Obtain the Flink batch (DataSet API) execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // 2. Read the input file line by line.
        DataSource<String> dataSource = env.readTextFile("input/word.txt");
        // 3. Split each line into words and emit one (word, 1) tuple per word.
        FlatMapOperator<String, Tuple2<String, Integer>> wordToOne = dataSource.flatMap(new MyFlatMap());
        // 4.1 Group the tuples by the word (tuple field 0).
        UnsortedGrouping<Tuple2<String, Integer>> groupBy = wordToOne.groupBy(0);
        // 4.2 Sum the counts (tuple field 1) within each group.
        AggregateOperator<Tuple2<String, Integer>> result = groupBy.sum(1);
        // 5. Print the result to the console (print() also triggers job execution
        //    in the DataSet API, so no explicit env.execute() is needed).
        result.print();
    }

    /**
     * Splits a line of text on whitespace and emits a {@code (word, 1)} tuple
     * for every non-empty word.
     */
    public static class MyFlatMap implements FlatMapFunction<String, Tuple2<String, Integer>> {

        /**
         * @param value one line of input text
         * @param out   collector that sends data downstream — the flatMap
         *              equivalent of returning zero or more values
         * @throws Exception per the {@link FlatMapFunction} contract
         */
        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
            // Split on runs of whitespace. The original split(" ") produced empty
            // tokens for consecutive spaces/tabs, which were then counted as a
            // bogus "" word with its own tally.
            String[] words = value.split("\\s+");
            for (String word : words) {
                // Leading whitespace still yields one empty first token — skip it.
                if (!word.isEmpty()) {
                    out.collect(Tuple2.of(word, 1));
                }
            }
        }
    }
}
