package cn.tedu;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Flink DataSet (batch) word count.
 *
 * <p>Reads a text file, splits every line into whitespace-separated words,
 * and prints each distinct word together with its total occurrence count
 * as {@code (word, count)} tuples.
 *
 * <p>Usage: an optional first program argument overrides the default input
 * file path.
 *
 * @author Amos
 * @date 2022/5/18
 */
public class WordCount {

    /** Default input file, used when no path is supplied on the command line. */
    private static final String DEFAULT_INPUT = "FLINKJAVA/data/wordcount.txt";

    public static void main(String[] args) throws Exception {
        // 1. Create the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // 2. Read the data source. Allowing args[0] to override the path keeps
        //    the job runnable against any file, not just the hard-coded one.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        DataSource<String> source = env.readTextFile(inputPath);
        // NOTE: the original also called source.print() here; in the DataSet
        // API that eagerly runs a second, separate job just to dump the raw
        // input, so the debug call was removed.

        // 3. Transform each line into (word, 1) pairs:
        //    "hello hbase hello" -> (hello,1),(hbase,1),(hello,1)
        FlatMapOperator<String, Tuple2<String, Integer>> flatMapOperator =
                source.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Split on runs of whitespace so consecutive spaces or
                        // tabs do not yield empty tokens (split(" ") would).
                        for (String word : value.split("\\s+")) {
                            if (!word.isEmpty()) {
                                // Diamond operator: the original used a raw
                                // Tuple2, which compiles with an unchecked warning.
                                out.collect(new Tuple2<>(word, 1));
                            }
                        }
                    }
                });

        // 4. Group by the word (tuple field 0) and sum the counts (field 1).
        AggregateOperator<Tuple2<String, Integer>> result =
                flatMapOperator.groupBy(0).sum(1);

        // 5. Sink: print() also triggers job execution in the DataSet API,
        //    so no explicit env.execute() call is needed.
        result.print();
    }
}
