package com.desheng.bigdata.flink.batch;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.util.Collector;

/**
 * Java version of the Flink batch getting-started example: WordCount.
 *
 * Programming steps:
 *  1. Create the program entry point, {@code ExecutionEnvironment}
 *       - the streaming counterpart is {@code StreamExecutionEnvironment}
 *       - local execution:  {@code ExecutionEnvironment.createLocalEnvironment()}
 *       - remote execution: {@code ExecutionEnvironment.createRemoteEnvironment()}
 *       - {@code getExecutionEnvironment()} is preferred: it picks the right
 *         environment automatically depending on where the job runs.
 *  2. Load an external data source to obtain the programming model (DataSet/DataStream).
 *     Two built-in options: external files or external collections
 *     (custom sources are only available in the streaming API).
 *  3. Apply the business logic (transformations).
 *  4. Write the result to an external system (sink).
 *  5. Submit the job via {@code execute()}.
 *     Note for DataSet programs: if the last operator is count/print/collect etc.,
 *     those operators already trigger execution internally, so calling
 *     {@code execute()} again fails with:
 *     RuntimeException: No new data sinks have been defined since the last execution.
 */
public class JavaFlinkBatchWordCountApp {
    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 2) {
            // println (not print) so the message is terminated; ASCII colon in the usage text
            System.err.println("Usage: <input> <output>");
            System.exit(-1);
        }
        // step 1: create the program entry point
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // step 2: load the external data source through the environment
        DataSource<String> inputs = env.readTextFile(args[0]);
        // step 3: business logic — tokenize each line into (word, 1) pairs.
        // As in MapReduce, the Collector forwards records to the downstream operator.
        FlatMapOperator<String, Tuple2<String, Integer>> pairs = inputs.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
                for (String word : line.split("\\s+")) {
                    // split("\\s+") yields an empty first token on lines with
                    // leading whitespace — skip it so "" is never counted as a word
                    if (!word.isEmpty()) {
                        out.collect(Tuple2.of(word, 1));
                    }
                }
            }
        });

        // group by the word (field 0) and sum the counts (field 1)
        AggregateOperator<Tuple2<String, Integer>> aggrVal = pairs.groupBy(0).sum(1);

        // step 4: sink — write results, overwriting any previous output
        aggrVal.writeAsText(args[1], FileSystem.WriteMode.OVERWRITE);

        // step 5: submit the job (required here because writeAsText is a lazy sink)
        env.execute(JavaFlinkBatchWordCountApp.class.getSimpleName());
    }
}
