package com.caul.demo.hadoop.mapreduce.rank;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;

/**
 * YARN client for the rank MapReduce job.
 * Purpose: submits our MapReduce program (jar) to the YARN server, which distributes
 * the jar to all NodeManagers for execution.
 * Launching with "java -cp XXX.jar MainClass" is discouraged; prefer
 * "hadoop jar XXX.jar MainClass [topn] [outputDir] [inputFile/inputPath...]"
 * (the hadoop launcher pulls in the required dependencies automatically).
 * Created by sdliang on 2018/3/30.
 */
public class RankSubmitter {

  /**
   * Entry point: configures the rank MapReduce job and submits it to YARN,
   * blocking until completion.
   *
   * @param args [0] top-N count, [1] output directory, [2..] one or more input files/paths
   * @throws IOException if job submission fails
   * @throws ClassNotFoundException if a job class cannot be resolved on the cluster
   * @throws InterruptedException if the wait for completion is interrupted
   */
  public static void main(String[] args)
      throws IOException, ClassNotFoundException, InterruptedException {

    // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
    if (args.length < 3) {
      System.err.println(
          "Usage: hadoop jar XXX.jar " + RankSubmitter.class.getName()
              + " <topn> <outputDir> <inputFile/inputPath>...");
      System.exit(1);
    }

    int topn = Integer.parseInt(args[0]);

    Path destPath = new Path(args[1]);

    // All remaining arguments are input paths.
    Path[] srcPaths = new Path[args.length - 2];
    for (int i = 2; i < args.length; i++) {
      srcPaths[i - 2] = new Path(args[i]);
    }

    Configuration conf = new Configuration();
    // Pass the top-N count to the mapper/reducer via the job configuration.
    conf.set("top.n", String.valueOf(topn));
    Job job = Job.getInstance(conf, "RankDemo");

    // Locate the jar to ship to YARN from this class's classpath entry.
    job.setJarByClass(RankSubmitter.class);
    // Mapper configuration: emits RankKey keys with empty (NullWritable) values.
    job.setMapperClass(RankMapper.class);
    job.setMapOutputKeyClass(RankKey.class);
    job.setMapOutputValueClass(NullWritable.class);
    // Reducer configuration: emits Text keys with empty (NullWritable) values.
    job.setReducerClass(RankReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(NullWritable.class);
    // Grouping comparator decides which map keys are grouped into one reduce() call.
    job.setGroupingComparatorClass(RankGroupingComparator.class);
    // Tell YARN the input is plain text, read line by line.
    job.setInputFormatClass(TextInputFormat.class);
    // Set the input file paths to read.
    FileInputFormat.setInputPaths(job, srcPaths);
    // Output is written as plain text to destPath.
    job.setOutputFormatClass(TextOutputFormat.class);
    FileOutputFormat.setOutputPath(job, destPath);
    // Custom partitioner with a single reduce task, so one output file is produced.
    job.setPartitionerClass(RankPartitioner.class);
    job.setNumReduceTasks(1);

    // Submit the job and wait; exit code reflects job success or failure.
    boolean success = job.waitForCompletion(true);
    System.exit(success ? 0 : 1);
  }
}
