package com.atguigu.wordcount.multiplejob;

import com.atguigu.constant.CommonConstantValue;
import com.atguigu.util.HdfsUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


/**
 * Driver that chains two dependent MapReduce jobs (inverted-index build) via JobControl.
 *
 * @author 剧情再美终是戏
 * @since 2019/11/20 15:59
 * @version 1.0
 **/
public class ExDriver {

    /**
     * Runs two chained MapReduce jobs: job1 (Ex1Mapper/Ex1Reducer) produces an
     * intermediate index, and job2 (Ex2Mapper/Ex2Reducer) consumes job1's output
     * via {@code KeyValueTextInputFormat} to produce the final index. Job ordering
     * is enforced with {@link ControlledJob} dependencies under a {@link JobControl}.
     *
     * @param args unused
     * @throws Exception if filesystem cleanup or job setup fails
     */
    public static void main(String[] args) throws Exception {

        // One Configuration per job so job-specific settings don't leak between them.
        Configuration conf1 = new Configuration();
        Configuration conf2 = new Configuration();

        // Job2 reads job1's output with KeyValueTextInputFormat; split each input
        // line into key/value at the first ':'.
        // BUG FIX: this must be set on conf2 (job2's configuration). The original
        // set it on conf1, which job2 never sees, so job2 silently fell back to
        // the default tab separator.
        conf2.set(KeyValueLineRecordReader.KEY_VALUE_SEPERATOR, ":");

        // Input/output locations; job1's output directory is job2's input.
        // NOTE(review): backslash separators suggest a Windows local-FS run —
        // for a real HDFS path these would need to be forward slashes; confirm.
        Path job1InputPath = new Path(CommonConstantValue.HADOOP_TEST_WORD_FILE + "\\mrinput\\index");
        Path job1OutputPath = new Path(CommonConstantValue.HADOOP_TEST_WORD_FILE + "\\mroutput\\index");
        Path job2OutputPath = new Path(CommonConstantValue.HADOOP_TEST_WORD_FILE + "\\mroutput\\finalindex");

        // Remove stale output directories so the jobs don't fail on existing output.
        // (FileSystem instances from HdfsUtil are not closed here — FileSystem.get
        // typically returns a cached, shared instance; closing it could break later use.)
        FileSystem fs1 = HdfsUtil.getFileSystem(conf1);
        FileSystem fs2 = HdfsUtil.getFileSystem(conf2);
        HdfsUtil.delete(fs1, job1OutputPath, true);
        HdfsUtil.delete(fs2, job2OutputPath, true);


        // ----------------------- job1 setup --------------------------------
        Job job1 = Job.getInstance(conf1);
        job1.setJobName("index1");

        // Mapper / reducer classes
        job1.setMapperClass(Ex1Mapper.class);
        job1.setReducerClass(Ex1Reducer.class);

        // Output key/value types (applies to both map and reduce output here)
        job1.setOutputKeyClass(Text.class);
        job1.setOutputValueClass(IntWritable.class);

        // Input / output locations
        FileInputFormat.setInputPaths(job1, job1InputPath);
        FileOutputFormat.setOutputPath(job1, job1OutputPath);
        // ----------------------- job1 setup end ----------------------------


        // ----------------------- job2 setup --------------------------------
        Job job2 = Job.getInstance(conf2);
        job2.setJobName("index2");

        // Mapper / reducer classes
        job2.setMapperClass(Ex2Mapper.class);
        job2.setReducerClass(Ex2Reducer.class);

        // Output key/value types
        job2.setOutputKeyClass(Text.class);
        job2.setOutputValueClass(Text.class);

        // Input / output locations: job2 consumes job1's output
        FileInputFormat.setInputPaths(job2, job1OutputPath);
        FileOutputFormat.setOutputPath(job2, job2OutputPath);

        // Parse job1's "key:value" output lines (separator set on conf2 above)
        job2.setInputFormatClass(KeyValueTextInputFormat.class);
        // ----------------------- job2 setup end ----------------------------


        // Wrap jobs as ControlledJobs and declare job2's dependency on job1.
        ControlledJob controlledJob1 = new ControlledJob(job1.getConfiguration());
        ControlledJob controlledJob2 = new ControlledJob(job2.getConfiguration());
        controlledJob2.addDependingJob(controlledJob1);

        // Register both jobs with a JobControl group.
        JobControl jobControl = new JobControl("index");
        jobControl.addJob(controlledJob1);
        jobControl.addJob(controlledJob2);

        // JobControl is a Runnable that polls and launches ready jobs; run it on
        // a daemon thread so a hang can never keep the JVM alive.
        Thread jobControlThread = new Thread(jobControl);
        jobControlThread.setDaemon(true);
        jobControlThread.start();

        // Poll until every job has finished (successfully or not).
        while (!jobControl.allFinished()) {
            Thread.sleep(500);
        }
        // Explicitly stop the polling thread instead of relying on daemon exit.
        jobControl.stop();

        System.out.println(jobControl.getSuccessfulJobList());
        // BUG FIX: report failures instead of unconditionally claiming success.
        if (jobControl.getFailedJobList().isEmpty()) {
            System.out.println("执行成功！~");
        } else {
            System.out.println("Failed jobs: " + jobControl.getFailedJobList());
        }
    }
}
