package com.fink.demo.wordcount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Counts word occurrences using the legacy DataSet batch API (treating input
 * as a bounded data set; this style is no longer recommended — prefer the
 * unified DataStream API). Example input:
 *   hello world
 *   hello flink
 *   hello java
 */
public class WordCountBatch {

    /**
     * Entry point: reads a text file, splits each line on single spaces into
     * words, and prints each word with its occurrence count.
     *
     * @param args optional; {@code args[0]} overrides the input file path
     *             (defaults to {@code src/main/resources/word.txt})
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {

        // 1. Create the batch execution environment.
        ExecutionEnvironment environment = ExecutionEnvironment.getExecutionEnvironment();

        // Input path is overridable from the command line; forward slashes
        // are portable across Windows and Unix (the original backslash path
        // only worked on Windows).
        String inputPath = args.length > 0 ? args[0] : "src/main/resources/word.txt";

        // 2. Read the source data, one String element per line.
        DataSource<String> dataSource = environment.readTextFile(inputPath);

        // 3. Split each line into (word, 1) tuples.
        FlatMapOperator<String, Tuple2<String, Integer>> flatMap = dataSource.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String line, Collector<Tuple2<String, Integer>> collector) throws Exception {
                for (String word : line.split(" ")) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        });

        // 4. Group tuples by the word (tuple field 0).
        UnsortedGrouping<Tuple2<String, Integer>> groupBy = flatMap.groupBy(0);

        // 5. Sum the per-word counts (tuple field 1).
        AggregateOperator<Tuple2<String, Integer>> sum = groupBy.sum(1);

        // 6. print() triggers job execution and writes results to stdout.
        sum.print();
    }
}
