package com.study.flink.word;

import java.util.Locale;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;


/**
 * Batch word-count job: reads a text file, splits each line into lowercase
 * words and prints every distinct word together with its occurrence count.
 *
 * @author stephen.shen
 * @create 2019-01-25 16:15
 */
public class JavaBatchWordCount {

    /** Input file used when no path is supplied on the command line. */
    private static final String DEFAULT_PATH = "D:\\ProgramFiles\\flink-1.7.1\\README.txt";

    public static void main(String[] args) throws Exception {

        // Allow the input file to be overridden via the first program argument.
        String path = args.length > 0 ? args[0] : DEFAULT_PATH;

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSource<String> textFile = env.readTextFile(path);

        DataSet<Tuple2<String, Long>> sum = textFile.flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
            @Override
            public void flatMap(String line, Collector<Tuple2<String, Long>> out) throws Exception {
                // Locale.ROOT keeps lowercasing independent of the JVM's
                // default locale (avoids the Turkish dotless-i problem).
                // "\\s+" collapses runs of whitespace into a single split,
                // instead of emitting one empty token per extra character.
                String[] splits = line.toLowerCase(Locale.ROOT).split("\\s+");
                for (String word : splits) {
                    // A line with leading whitespace still yields one empty
                    // leading token, so the emptiness check stays.
                    if (!word.isEmpty()) {
                        out.collect(new Tuple2<>(word, 1L));
                    }
                }
            }
        }).groupBy(0).sum(1);

        // In the DataSet API, print() itself triggers job execution; calling
        // env.execute(...) afterwards would fail with "No new data sinks".
        sum.print();
    }

    /**
     * Simple POJO holding a word and how many times it occurred.
     * NOTE(review): not referenced by this job; retained as a public nested
     * class for compatibility with any external callers.
     */
    public static class WordWithCount {
        public String word;
        public long count;

        public WordWithCount() {
        }

        public WordWithCount(String word, long count) {
            this.word = word;
            this.count = count;
        }

        @Override
        public String toString() {
            return "WordWithCount{" +
                    "word='" + word + '\'' +
                    ", count=" + count +
                    '}';
        }
    }

}
