package chen.bupt.mapreduce.workflow;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import chen.bupt.constant.Constants;
import chen.bupt.mapreduce.feature.FeatureExtractJob;
import chen.bupt.mapreduce.segterm.SegTermJob;
import chen.bupt.mapreduce.tfidf.TFIDFJob;
import chen.bupt.util.HDFSFileUtils;

public class Workflow {

	/**
	 * Driver that chains four MapReduce jobs sequentially:
	 * <ol>
	 *   <li>SegTermJob  — word segmentation (map-only output of Text keys)</li>
	 *   <li>TFIDFJob pass 1 — term counts into a temporary directory</li>
	 *   <li>TFIDFJob pass 2 — TF-IDF scores from the temporary counts</li>
	 *   <li>FeatureExtractJob — feature-term selection (single reducer)</li>
	 * </ol>
	 * Each stage aborts the whole pipeline (exit code 1) if its job fails,
	 * so later stages never run against missing or partial input.
	 *
	 * @param args unused command-line arguments
	 * @throws Exception if job submission or HDFS access fails
	 */
	public static void main(String[] args) throws Exception {
		// Stage 1: SegTermJob — segment raw input into terms.
		Configuration conf1 = new Configuration();
		Job job1 = new Job(conf1, "WordSplit");
		job1.setJarByClass(Workflow.class);
		job1.setMapperClass(SegTermJob.Map.class);
		job1.setOutputKeyClass(Text.class);
		job1.setOutputValueClass(NullWritable.class);
		Path input1 = new Path(Constants.SOURCE_INPUT);
		Path output1 = new Path(Constants.SEG_PATH);
		// Remove stale output so a rerun does not fail on an existing directory.
		HDFSFileUtils.deleteFile(output1, conf1);
		FileInputFormat.addInputPath(job1, input1);
		FileOutputFormat.setOutputPath(job1, output1);
		if (!job1.waitForCompletion(true)) {
			System.exit(1); // abort: downstream jobs depend on SEG_PATH
		}

		// Stage 2: TFIDFJob pass 1 — term frequencies into a temp directory.
		Configuration conf2 = new Configuration();
		Path input2 = new Path(Constants.SEG_PATH);
		Path tmp1 = new Path(Constants.TMP1);
		Path output2 = new Path(Constants.TFIDF_PATH);
		HDFSFileUtils.deleteFile(tmp1, conf2);
		conf2.set("mapred.child.tmp", "/tmp/child");
		Job job2 = new Job(conf2, "tfidf1");
		job2.setJarByClass(Workflow.class);
		job2.setMapperClass(TFIDFJob.Map1.class);
		job2.setReducerClass(TFIDFJob.Reduce1.class);
		job2.setMapOutputKeyClass(Text.class);
		job2.setMapOutputValueClass(Text.class);
		job2.setOutputKeyClass(Text.class);
		job2.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job2, input2);
		FileOutputFormat.setOutputPath(job2, tmp1);
		if (!job2.waitForCompletion(true)) {
			System.exit(1); // abort: pass 2 reads TMP1
		}

		// Stage 3: TFIDFJob pass 2 — compute TF-IDF scores from the temp counts.
		Configuration conf3 = new Configuration();
		conf3.set("mapred.child.tmp", "/tmp/child");
		Job job3 = new Job(conf3, "tfidf2");
		job3.setMapOutputKeyClass(Text.class);
		job3.setMapOutputValueClass(Text.class);
		job3.setJarByClass(Workflow.class);
		job3.setMapperClass(TFIDFJob.Map2.class);
		job3.setReducerClass(TFIDFJob.Reduce2.class);
		job3.setOutputKeyClass(Text.class);
		job3.setOutputValueClass(NullWritable.class);
		FileInputFormat.addInputPath(job3, tmp1);
		// Clean TFIDF_PATH like every other output path, so reruns succeed.
		HDFSFileUtils.deleteFile(output2, conf3);
		FileOutputFormat.setOutputPath(job3, output2);
		if (!job3.waitForCompletion(true)) {
			System.exit(1); // abort: feature extraction reads TFIDF_PATH
		}
		// Intermediate data is no longer needed once pass 2 succeeds.
		HDFSFileUtils.deleteFile(tmp1, conf3);

		// Stage 4: FeatureExtractJob — select feature terms with one reducer
		// so the result is a single globally ordered output file.
		Configuration conf4 = new Configuration();
		Job job4 = new Job(conf4, "featureExtract");
		job4.setJarByClass(Workflow.class);
		job4.setMapperClass(FeatureExtractJob.Map1.class);
		job4.setReducerClass(FeatureExtractJob.Reduce1.class);
		job4.setOutputKeyClass(Text.class);
		job4.setOutputValueClass(Text.class);
		job4.setNumReduceTasks(1);
		Path input4 = new Path(Constants.TFIDF_PATH);
		Path output4 = new Path(Constants.FEATURE_TERM_PATH);
		HDFSFileUtils.deleteFile(output4, conf4);
		FileInputFormat.addInputPath(job4, input4);
		FileOutputFormat.setOutputPath(job4, output4);
		System.exit(job4.waitForCompletion(true) ? 0 : 1);
	}

}
