package com.hkbigdata.source;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.*;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Batch WordCount example using the Flink DataSet API.
 *
 * @author liuanbo (2194550857@qq.com)
 * @since 2024-03-06
 */
public class Flink002_WordCount_Batch {
    /**
     * Reads {@code input/word.txt}, counts occurrences of each word, and prints
     * the result, e.g. (spark,2) (kafka,2) (flink,2) (hadoop,3).
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails (e.g. the input file is missing)
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the printed result is a single, deterministic stream.
        env.setParallelism(1);

        // 2. Read the source file; one String element per line.
        DataSource<String> lines = env.readTextFile("input/word.txt");

        // 3. Flatten each line into individual words.
        //    (Variable renamed from "FlatMapOperator", which shadowed its own type name.)
        FlatMapOperator<String, String> words = lines.flatMap(new MyFlatMap());

        // 4. Map each word to a (word, 1) tuple: (spark,1) (hadoop,1) (kafka,1) ...
        MapOperator<String, Tuple2<String, Integer>> wordToOne =
                words.map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String word) throws Exception {
                        return new Tuple2<>(word, 1);
                    }
                });

        // 5. Group by the word (tuple field 0): (spark,1) (spark,1) ...
        UnsortedGrouping<Tuple2<String, Integer>> grouped = wordToOne.groupBy(0);

        // 6. Sum the counts (tuple field 1) within each group: (spark,2)
        AggregateOperator<Tuple2<String, Integer>> sum = grouped.sum(1);

        // 7. print() triggers job execution and writes the result to stdout.
        sum.print();
    }

    /** Splits a line of text on whitespace and emits each individual word. */
    public static class MyFlatMap implements FlatMapFunction<String, String> {

        @Override
        public void flatMap(String value, Collector<String> collector) throws Exception {
            // Split on runs of whitespace (not a single literal space) and skip
            // empty tokens, so repeated spaces, tabs, or leading separators do
            // not produce "" entries that would be counted as words.
            for (String word : value.split("\\s+")) {
                if (!word.isEmpty()) {
                    collector.collect(word);
                }
            }
        }
    }
}
