package cn.itcast.flink.base;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Author itcast
 * Date 2021/7/26 15:57
 * Desc:
 * A WordCount example implemented with Flink's batch DataSet API.
 * Development steps:
 * 1. Create the batch execution environment
 * 2. Load the initial data from in-memory elements
 * 3. Split each string on whitespace
 * 4. Map each extracted word to a tuple [word, 1]
 * 5. Group the tuples by key (the word)
 * 6. Aggregate (sum) the counts per group
 */
public class WordCount {
    /**
     * Entry point: runs a batch WordCount over a small in-memory data set
     * and prints each word with its occurrence count to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if Flink job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        //todo 1. Create the batch execution environment
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        //todo 2. Load the initial data from in-memory elements
        DataSource<String> source = env.fromElements("itcast hadoop spark", "itcast hadoop spark", "itcast hadoop", "itcast");
        //todo 3. Split each line into words on whitespace
        //map: transforms one value into exactly one other value (1:1)
        //flatMap: splits one value into zero or more values (1:N)
        FlatMapOperator<String, String> flatMapDataSet = source.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String value, Collector<String> out) throws Exception {
                // \\s+ collapses runs of whitespace; split(" ") would emit empty
                // tokens for consecutive spaces and count a bogus "" word.
                String[] words = value.split("\\s+");
                for (String word : words) {
                    // split can still yield one leading "" when the line starts
                    // with whitespace — skip empty tokens defensively.
                    if (!word.isEmpty()) {
                        out.collect(word);
                    }
                }
            }
        });
        //todo 4. Map each word to a tuple [word, 1]
        MapOperator<String, Tuple2<String, Integer>> mapDataSet = flatMapDataSet.map(new MapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> map(String value) throws Exception {
                return Tuple2.of(value, 1);
            }
        });
        //todo 5. Group the tuples by key (tuple field 0 = the word)
        AggregateOperator<Tuple2<String, Integer>> result = mapDataSet
                .groupBy(0)
                //todo 6. Aggregate: sum the counts (tuple field 1)
                .sum(1);
        //todo 7. Print the result (print() also triggers job execution)
        result.print();
    }
}
