package com.atguigu.flink0725;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.UnsortedGrouping;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

/**
 * Batch word count using the Flink DataSet API: reads a text file, splits each
 * line into words, and writes "word count" pairs to a CSV file.
 */
public class BatchWordCount {

    // Default paths, used when no CLI arguments are supplied (preserves the
    // original hard-coded behavior).
    private static final String DEFAULT_INPUT = "D:\\Project\\flink\\Flink0725\\input\\words.txt";
    private static final String DEFAULT_OUTPUT = "D:\\Project\\flink\\Flink0725\\output\\word1.txt";

    /**
     * Entry point.
     *
     * @param args optional overrides: args[0] = input file path, args[1] = output file path
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

        // Create the batch execution environment.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Read the input file as a dataset of lines.
        DataSource<String> text = env.readTextFile(inputPath);

        // Tokenize each line into (word, 1) pairs.
        FlatMapOperator<String, Tuple2<String, Integer>> wordAndOne = text.flatMap(new SplitClz());

        // Group by the word (tuple field 0) and sum the counts (tuple field 1).
        UnsortedGrouping<Tuple2<String, Integer>> groupedWordAndOne = wordAndOne.groupBy(0);
        AggregateOperator<Tuple2<String, Integer>> out = groupedWordAndOne.sum(1);

        // One record per line, fields separated by a space; parallelism 1 so
        // the sink produces a single output file instead of a directory.
        out.writeAsCsv(outputPath, "\n", " ").setParallelism(1);

        // DataSet sinks such as writeAsCsv are lazy; execute() triggers the job.
        env.execute();
    }

    /** Splits a line on whitespace and emits a (word, 1) tuple per token. */
    static class SplitClz implements FlatMapFunction<String, Tuple2<String, Integer>> {

        @Override
        public void flatMap(String line, Collector<Tuple2<String, Integer>> collector) throws Exception {
            // Split on runs of whitespace. The previous split(" ") produced
            // empty tokens for leading/consecutive spaces, which were then
            // counted as a "" word; the isEmpty() guard drops those.
            for (String word : line.split("\\s+")) {
                if (!word.isEmpty()) {
                    collector.collect(new Tuple2<>(word, 1));
                }
            }
        }
    }
}
