package com.myflink.day01;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;


/**
 * @author Shelly An
 * @create 2020/9/15 10:09
 * 对比spark来学习，批处理WordCount
 */
public class WordCount_Batch {
    public static void main(String[] args) throws Exception {
        // 0. Create the batch execution environment (the Flink analogue of
        //    Spark's SparkContext for a DataSet job).
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 1. Read a bounded DataSet from a text file. The path may be
        //    overridden with the first program argument; the original
        //    hard-coded default is kept for backward compatibility.
        String inputPath = args.length > 0 ? args[0] : "input/word.txt";
        DataSource<String> fileDS = env.readTextFile(inputPath);

        // 2.1 Split each line and convert every word into a (word, 1) tuple.
        FlatMapOperator<String, Tuple2<String, Integer>> wordAndOneTuple =
                fileDS.flatMap(new MyFlatMapFunction());
        // 2.2 Group by the word (tuple field 0).
        UnsortedGrouping<Tuple2<String, Integer>> wordAndOneGroup = wordAndOneTuple.groupBy(0);
        // 2.3 Sum the counts (tuple field 1) within each group.
        AggregateOperator<Tuple2<String, Integer>> result = wordAndOneGroup.sum(1);

        // 3. Print the result. For the DataSet API, print() itself triggers
        //    job execution, so no explicit env.execute() is required.
        result.print();
    }

    /**
     * Splits an input line into words and emits a {@code (word, 1)} tuple
     * for each word found.
     */
    public static class MyFlatMapFunction implements FlatMapFunction<String, Tuple2<String, Integer>> {

        /**
         * Tokenizes one line and forwards a (word, 1) tuple per word.
         *
         * @param value one line of input text
         * @param out   collector used to emit tuples downstream
         * @throws Exception per the FlatMapFunction contract
         */
        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
            // Split on runs of whitespace so consecutive spaces/tabs do not
            // yield empty tokens (the original split(" ") counted "" as a word
            // whenever two separators were adjacent).
            String[] words = value.split("\\s+");
            for (String word : words) {
                // A leading separator still produces one empty leading token;
                // skip it so empty strings are never counted.
                if (!word.isEmpty()) {
                    // out.collect() sends the tuple downstream.
                    out.collect(new Tuple2<>(word, 1));
                }
            }
        }
    }
}
