package com.xl.flinkdemo.wc;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

// Batch word count example.
// Flink's batch-processing API is called the DataSet API.
public class WordCount {

  /**
   * Entry point: reads a text file, splits each line into words, and prints
   * the per-word counts as {@code (word, count)} tuples.
   *
   * @param args optional; {@code args[0]} overrides the default input path so
   *     the job is not tied to one machine's directory layout
   * @throws Exception if the Flink job fails during execution
   */
  public static void main(String[] args) throws Exception {

    // Create the batch execution environment.
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    // Input path: allow overriding via the first program argument;
    // fall back to the original hard-coded default for compatibility.
    String inputPath =
        args.length > 0 ? args[0] : "D:\\studyspace\\flinkdemo\\src\\main\\resources\\";

    // Read the source data, one String record per line.
    DataSet<String> inputDataset = env.readTextFile(inputPath);

    // Each line is split into individual words, mapped to (word, 1) tuples,
    // grouped by the word (tuple field 0), and summed over the count
    // (tuple field 1).
    DataSet<Tuple2<String, Integer>> resultset =
        inputDataset
            .flatMap(new MyFlatMapper())
            .groupBy(0)
            .sum(1);

    // Print the result; for the DataSet API this also triggers execution.
    resultset.print();
  }

  /** Splits a line into whitespace-separated words and emits (word, 1) per word. */
  public static class MyFlatMapper implements FlatMapFunction<String, Tuple2<String, Integer>> {
    @Override
    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
      // Split on runs of whitespace: the previous split(" ") produced empty
      // tokens for consecutive/leading spaces, which were then counted as
      // "words". The isEmpty() guard also skips the single empty token that
      // split() yields for a blank line.
      for (String word : value.split("\\s+")) {
        if (!word.isEmpty()) {
          out.collect(new Tuple2<>(word, 1));
        }
      }
    }
  }
}
