package com.mr.example3.friend;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/*
 * 1. This driver submits two Jobs:
 * 		Job2 depends on Job1 — it may only run after Job1 has completed
 * 		and produced its output.
 *
 * 2. JobControl: defines a group of MR Jobs and their dependency relationships.
 * 		Jobs are added to a JobControl via addJob(ControlledJob aJob).
 *
 * 3. ControlledJob: a Job wrapper that can carry dependency information.
 * 		Construction options:
 * 		(1) ControlledJob(Configuration conf): create a Job, then use
 * 		    addDependingJob(ControlledJob dependingJob) to add its dependencies.
 *
 * 		(2) ControlledJob(Job job, List<ControlledJob> dependingJobs): create a
 * 		    Job and specify which Jobs it depends on in one step.
 */

/**
 * Driver that submits two dependent MapReduce jobs through {@link JobControl}:
 * job2 reads job1's output directory, so job2 is declared to depend on job1
 * and only runs after job1 completes successfully.
 */
public class Example3Driver {

	/** Poll interval (ms) while waiting for the job group to finish. */
	private static final long POLL_INTERVAL_MS = 500L;

	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

		// Input for job1; job1's output is job2's input; job2 writes finalOutput.
		Path input = new Path("E:/尚硅谷大数据/05-Hadoop，HadoopHA，Zookeeper/05-Mapreduce/mrinput/friend");
		Path output = new Path("E:/尚硅谷大数据/05-Hadoop，HadoopHA，Zookeeper/05-Mapreduce/mroutput/friend");
		// Output directory of the second job
		Path finalOutput = new Path("E:/尚硅谷大数据/05-Hadoop，HadoopHA，Zookeeper/05-Mapreduce/mroutput/final_friend");

		// Separate configurations so each job can be tuned independently
		Configuration conf1 = new Configuration();
		Configuration conf2 = new Configuration();

		// Use ':' as the key/value separator for job1's KeyValueTextInputFormat
		// (job2 keeps the default tab separator, which matches job1's reducer output)
		conf1.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ":");

		// Output directories must not exist before a job runs — delete them if present
		FileSystem fs1 = FileSystem.get(conf1);

		if (fs1.exists(output)) {

			fs1.delete(output, true);

		}

		FileSystem fs2 = FileSystem.get(conf2);

		if (fs2.exists(finalOutput)) {

			fs2.delete(finalOutput, true);

		}

		// Create job1
		Job job1 = Job.getInstance(conf1);
		// Create job2
		Job job2 = Job.getInstance(conf2);

		// Job names
		job1.setJobName("friend1");
		job2.setJobName("friend2");

		// Mapper/Reducer classes for each job
		job1.setMapperClass(Example3Mapper1.class);
		job1.setReducerClass(Example3Reducer1.class);
		job2.setMapperClass(Example3Mapper2.class);
		job2.setReducerClass(Example3Reducer2.class);

		// Both jobs read key/value text lines
		job1.setInputFormatClass(KeyValueTextInputFormat.class);
		job2.setInputFormatClass(KeyValueTextInputFormat.class);

		// Wire the input/output paths: job2 consumes job1's output
		FileInputFormat.setInputPaths(job1, input);
		FileOutputFormat.setOutputPath(job1, output);
		FileInputFormat.setInputPaths(job2, output);
		FileOutputFormat.setOutputPath(job2, finalOutput);

		// Ship the jar containing this driver class with both jobs
		job1.setJarByClass(Example3Driver.class);
		job2.setJarByClass(Example3Driver.class);

		// Output key/value types
		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(Text.class);
		job2.setMapOutputKeyClass(Text.class);
		job2.setMapOutputValueClass(Text.class);
		job2.setOutputKeyClass(Text.class);
		job2.setOutputValueClass(NullWritable.class);

		//————————————————————————————————————————
		// Create the JobControl (job group)
		JobControl jobControl = new JobControl("friend");

		// Wrap each Job so dependencies can be declared
		ControlledJob controlledJob1 = new ControlledJob(job1.getConfiguration());
		ControlledJob controlledJob2 = new ControlledJob(job2.getConfiguration());

		// Declare that job2 depends on job1
		controlledJob2.addDependingJob(controlledJob1);

		// Add both ControlledJobs to the group
		jobControl.addJob(controlledJob1);
		jobControl.addJob(controlledJob2);

		// JobControl implements Runnable — run it on a background thread
		Thread jobControlThread = new Thread(jobControl);
		// Daemon thread so the JVM can exit once main returns
		jobControlThread.setDaemon(true);
		jobControlThread.start();

		// Poll for completion; sleep between checks instead of busy-waiting,
		// which would otherwise spin a CPU core at 100% until the jobs finish
		while (!jobControl.allFinished()) {
			Thread.sleep(POLL_INTERVAL_MS);
		}

		System.out.println(jobControl.getSuccessfulJobList());

		// Surface failures instead of silently ignoring them
		if (!jobControl.getFailedJobList().isEmpty()) {
			System.out.println("Failed jobs: " + jobControl.getFailedJobList());
		}

		// Stop the JobControl thread cleanly
		jobControl.stop();

	}

}
