package com.chenjj.bigdata.flink.batch;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.util.Collector;

/**
 * Counts word occurrences in a local text file via the Flink batch
 * {@code DataSet} API and writes the result to an output directory.
 *
 * <p>Input and output paths default to hard-coded local locations but may be
 * overridden from the command line: {@code args[0]} = input file,
 * {@code args[1]} = output directory.
 */
public class WordCountByTextFile {

    /** Default input file, used when no command-line argument is supplied. */
    private static final String DEFAULT_INPUT =
            "D:\\Code\\Gitee\\bigdata\\flink\\src\\main\\resources\\word.txt";

    /** Default output directory, used when no command-line argument is supplied. */
    private static final String DEFAULT_OUTPUT = "D:\\Temp\\wordCount";

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // The parallelism also determines how many output files are produced.
        env.setParallelism(2);

        // Allow paths to be overridden from the command line (backward-compatible:
        // with no args the original hard-coded paths are used).
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

        DataSet<String> text = env.readTextFile(inputPath);

        DataSet<Tuple2<String, Integer>> counts =
                // split up the lines in pairs (2-tuples) containing: (word,1)
                text.flatMap(new Tokenizer())
                        // upper-case the word before grouping
                        .map(new MapFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                            @Override
                            public Tuple2<String, Integer> map(Tuple2<String, Integer> value) throws Exception {
                                return new Tuple2<>(value.f0.toUpperCase(), value.f1);
                            }
                        })
                        .groupBy(0)
                        .sum(1);

        // NOTE: DataSet#print() eagerly triggers job execution on its own;
        // env.execute() below then runs the writeAsText sink as a second job,
        // re-reading the input. Kept for console visibility of the result.
        counts.print();
        counts.writeAsText(outputPath, FileSystem.WriteMode.OVERWRITE);
        env.execute("WordCountByTextFile");
    }

    /**
     * Splits each input line into lower-cased words and emits a
     * {@code (word, 1)} tuple for every non-empty token.
     */
    static class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {

        @Override
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
            // normalize and split the line on runs of non-word characters
            String[] tokens = value.toLowerCase().split("\\W+");

            // emit the pairs, skipping empty tokens (e.g. leading punctuation)
            for (String token : tokens) {
                if (token.length() > 0) {
                    out.collect(new Tuple2<String, Integer>(token, 1));
                }
            }
        }
    }
}
