package modular.mapreduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.BasicConfigurator;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class JobSubmitter {

    /**
     * Driver that configures and submits a WordCount MapReduce job to a YARN
     * cluster (namenode/RM host: hdmaster), deleting any previous output dir
     * first. Exits with 0 on job success, 1 on failure.
     *
     * @throws IOException            on HDFS access or job-submission failure
     * @throws ClassNotFoundException if job classes cannot be resolved at submit time
     * @throws InterruptedException   if waiting for job completion is interrupted
     * @throws URISyntaxException     if the hard-coded HDFS URI is malformed
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException, URISyntaxException {
        BasicConfigurator.configure();

        // Set a JVM property so HDFS operations run as user "root"
        // (overrides the local OS user identity when talking to the cluster).
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration conf = new Configuration();
        // 1. Default filesystem for the job; after this, relative `new Path(...)`
        //    values resolve against hdfs://hdmaster:9000.
        conf.set("fs.defaultFS", "hdfs://hdmaster:9000");
        // 2. Where to submit the job. NOTE: without mapreduce.framework.name=yarn
        //    the job would silently run in the local runner and the RM hostname
        //    below would be ignored.
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.hostname", "hdmaster");
        // Cross-platform submission: avoids classpath/separator problems when
        // submitting from Windows to a Linux cluster.
        conf.set("mapreduce.app-submission.cross-platform", "true");

        Job job = Job.getInstance(conf);

        // 3. Location of the job jar (alternatively derive it from a contained class).
        job.setJar("E:/wc.jar");
        //job.setJarByClass(JobSubmitter.class);

        // 4. Mapper/Reducer implementation classes for this job.
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReduce.class);

        // 5. Key/value types of the map output AND of the final (reducer) output.
        //    Both must be set: the final output types default to
        //    LongWritable/Text, which would cause a runtime type-mismatch error
        //    for a Text/IntWritable reducer.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // 6. Input dataset path and output path. The output path must not
        //    exist or submission fails, so remove a stale one first.
        Path outPath = new Path("/yue/out");
        // try-with-resources so the FileSystem handle is always closed.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hdmaster:9000"), conf, "root")) {
            if (fs.exists(outPath)) {
                fs.delete(outPath, true);
            }
        }

        FileInputFormat.setInputPaths(job, new Path("/yue"));
        FileOutputFormat.setOutputPath(job, outPath);

        // 7. Number of reduce tasks to launch (default would be 1).
        job.setNumReduceTasks(2);

        // 8. Submit the job to YARN and block until completion (verbose=true
        //    streams progress to stdout).
        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
