package cn.hyxy.hadoop;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.shell.Count;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Demo18_ControlJobMR extends Configured implements Tool {
	/**
	 * Builds and runs a two-stage workflow: job1 performs a word count, job2
	 * re-sorts its output by count; job2 is declared dependent on job1 via
	 * {@link JobControl}.
	 *
	 * @param args args[0] = input path; args[1] = job1 output path (job2 writes
	 *             to args[1] + "_")
	 * @return 0 when every job succeeds, -1 on usage error or any job failure
	 * @throws Exception on HDFS or job-submission errors
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 2) {
			System.out.println("usage ...");
			return -1;
		}
		Configuration config = getConf();

		config.set("fs.defaultFS", "hdfs://hadoop31:8020");
		config.set("mapreduce.framework.name", "yarn");
		config.set("yarn.resourcemanager.hostname", "hadoop31");
		config.set("mapreduce.app-submission.cross-platform", "true");

		// Remove stale output directories so the workflow can be re-run.
		FileSystem fs = FileSystem.get(config);
		Path path1 = new Path(args[1]);
		if (fs.exists(path1)) {
			fs.delete(path1, true);
		}
		Path path2 = new Path(args[1] + "_");
		if (fs.exists(path2)) {
			fs.delete(path2, true);
		}

		// Job1: word count.
		Job job1 = Job.getInstance(config, "JobControl");
		// job1.setJarByClass(getClass()); // not needed when running locally
		job1.setJar("./target/hadoop-2.7.6-0.0.1-SNAPSHOT.jar");
		job1.setMapperClass(MyMapper.class);
		job1.setMapOutputKeyClass(Text.class);
		job1.setMapOutputValueClass(LongWritable.class);
		job1.setReducerClass(MyReducer.class);
		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(LongWritable.class);
		FileInputFormat.addInputPath(job1, new Path(args[0]));
		FileOutputFormat.setOutputPath(job1, path1);

		// Job2: sort by count. A shuffle phase is required, so do not set
		// the reduce-task count to 0.
		Job job2 = Job.getInstance(config, "JobControl2");
		// job2.setJarByClass(getClass()); // not needed when running locally
		job2.setJar("./target/hadoop-2.7.6-0.0.1-SNAPSHOT.jar");
		job2.setMapperClass(SortMapper.class);
		job2.setOutputKeyClass(Wordcount.class);
		job2.setOutputValueClass(NullWritable.class);
		FileInputFormat.addInputPath(job2, path1); // job1's output feeds job2
		FileOutputFormat.setOutputPath(job2, path2);

		// Wrap both jobs so JobControl can schedule them.
		ControlledJob controlledJob1 = new ControlledJob(config);
		controlledJob1.setJob(job1);

		ControlledJob controlledJob2 = new ControlledJob(config);
		controlledJob2.setJob(job2);

		// Key step: job2 runs only after job1 completes. (With a single
		// controlled job this call must be omitted — it causes a
		// java.lang.StackOverflowError.)
		controlledJob2.addDependingJob(controlledJob1);

		JobControl jobControl = new JobControl("AAA");
		jobControl.addJob(controlledJob1);
		jobControl.addJob(controlledJob2);

		// JobControl is a Runnable; run it on its own thread and poll.
		new Thread(jobControl).start();

		// BUGFIX: the original loop busy-waited with no sleep (pegging a CPU
		// core) and could return 0 even when jobs had failed, if allFinished()
		// became true before the failure branch was observed. Poll with a
		// sleep, then decide success/failure once everything has finished.
		while (!jobControl.allFinished()) {
			Thread.sleep(500);
		}

		int code = 0;
		List<ControlledJob> failed = jobControl.getFailedJobList();
		if (!failed.isEmpty()) {
			System.out.println("失败的个数：" + failed.size());
			code = -1;
		} else {
			System.out.println("成功的个数：" + jobControl.getSuccessfulJobList().size());
		}
		jobControl.stop(); // BUGFIX: shut down the JobControl monitor thread
		return code;
	}

	/**
	 * Entry point. Delegates to {@link ToolRunner} so that generic Hadoop
	 * options (-D, -conf, -fs, ...) are parsed before {@link #run} executes.
	 */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Demo18_ControlJobMR(), args));
	}

	/**
	 * Word-count mapper: emits (token, 1) for every whitespace-separated token
	 * of the input line.
	 */
	public static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
		// Reused output objects — the original allocated a new Text and
		// LongWritable per token, creating needless GC pressure per record.
		private final Text outKey = new Text();
		private static final LongWritable ONE = new LongWritable(1);

		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String[] tokens = value.toString().split("\\s+");
			for (String token : tokens) {
				// BUGFIX: a line with leading whitespace yields an empty first
				// element from split("\\s+"); don't count it as a word.
				if (token.isEmpty()) {
					continue;
				}
				outKey.set(token);
				context.write(outKey, ONE);
			}
		}
	}

	/**
	 * Word-count reducer: sums the per-token counts emitted by {@link MyMapper}
	 * and writes (word, total).
	 */
	public static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

		@Override
		protected void reduce(Text word, Iterable<LongWritable> counts, Context context)
				throws IOException, InterruptedException {
			long total = 0L;
			for (LongWritable count : counts) {
				total += count.get();
			}
			context.write(word, new LongWritable(total));
		}
	}

	/**
	 * Sort-stage mapper: parses each "word\tcount" line produced by job1 into a
	 * {@link Wordcount} key so the shuffle sorts records by that key's ordering.
	 */
	public static class SortMapper extends Mapper<LongWritable, Text, Wordcount, NullWritable> {

		@Override
		protected void map(LongWritable key, Text value,
				Mapper<LongWritable, Text, Wordcount, NullWritable>.Context context)
				throws IOException, InterruptedException {
			String[] fields = value.toString().split("\\s+");
			// BUGFIX: the original indexed fields[1] unconditionally, so a blank
			// or malformed line threw ArrayIndexOutOfBoundsException and killed
			// the task. Skip lines that don't have both a word and a count.
			if (fields.length < 2) {
				return;
			}
			// Local instead of the original instance field: the field carried no
			// state between calls, it was reassigned on every record.
			Wordcount wordcount = new Wordcount(fields[0], Long.parseLong(fields[1]));
			context.write(wordcount, NullWritable.get());
		}
	}

	// Wordcount----JavaBean
	public static class Wordcount implements WritableComparable<Wordcount> {
		private String name;
		private long count;

		public Wordcount() {
		}

		public Wordcount(String name, long count) {
			this.name = name;
			this.count = count;
		}

		@Override
		public String toString() {
			return name + "\t" + count;
		}

		@Override
		public void readFields(DataInput in) throws IOException {
			name = in.readUTF();
			count = in.readLong();
		}

		@Override
		public void write(DataOutput out) throws IOException {
			out.writeUTF(name);
			out.writeLong(count);
		}

		@Override
		public int compareTo(Wordcount arg0) {
			return -(int) (arg0.count - this.count);
		}
	}
}
