package edu.npu.GraphTransform;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import edu.npu.GraphUtility.GraphUtility;

/**
 * Driver for the graph-transformation pipeline.
 *
 * <p>Chains seven MapReduce jobs together with local shell commands to:
 * convert a directed edge list into a de-duplicated undirected graph,
 * extract and number its nodes, rewrite both edge endpoints to the new ids,
 * emit the graph in METIS input format, partition it locally with
 * {@code gpmetis}, and finally tag every key and value with its partition id.
 *
 * <p>Intermediate HDFS directories are created and removed as the pipeline
 * advances; each stage consumes the previous stage's output, so any job
 * failure aborts the whole run (see {@link #runOrDie(Job)}).
 */
public class GraphTransformDriver {
	/** Cluster user name used to build both HDFS and local paths. */
	public static final String USERNAME = "locker";
	/** HDFS working-directory prefix for all job inputs/outputs. */
	public static final String PATH_PREFIX = "hdfs://locker:9000/user/" + USERNAME + "/";
	/** Location of the hadoop launcher script on the local machine. */
	public static final String HADOOP_HOME = "/home/locker/hadoop-0.20.203.0/bin/";
	/** Local filesystem prefix mirroring {@link #PATH_PREFIX}. */
	public static final String LOCAL_PREFIX = "/home/" + USERNAME + "/";
	/** Number of partitions requested from gpmetis; also names its output file. */
	public static final int metis_num = 2;

	/**
	 * Submits {@code job} and blocks until it finishes, aborting the whole
	 * pipeline on failure.
	 *
	 * <p>The return value of {@code waitForCompletion} was previously ignored,
	 * which let later stages — including destructive {@code -rmr} deletes and
	 * the local gpmetis run — execute against missing or partial data after a
	 * job failure.
	 *
	 * @param job fully configured job to run
	 */
	private static void runOrDie(Job job)
			throws IOException, InterruptedException, ClassNotFoundException {
		if (!job.waitForCompletion(true)) {
			System.err.println("Job \"" + job.getJobName() + "\" failed; aborting pipeline.");
			System.exit(1);
		}
	}

	/**
	 * Entry point.
	 *
	 * @param args args[0] is the HDFS directory (relative to
	 *             {@link #PATH_PREFIX}) holding the input edge list
	 */
	public static void main(String[] args)
			throws IOException, InterruptedException, ClassNotFoundException {
		// Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("Usage: GraphTransformDriver <input-graph-dir>");
			System.exit(2);
		}
		// Optional preprocessing: number a raw local graph and upload it.
//		GraphUtility.GiveSig(LOCAL_PREFIX + "twitter", LOCAL_PREFIX + "twitter_sig");
//		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -put " + LOCAL_PREFIX + "twitter_sig " + PATH_PREFIX + "twitter");
		Configuration conf = new Configuration();
		Job nodeExtract, key_qualify, value_qualify, undirect, qualify, key_part, value_part;
		String nums = null;
		nodeExtract = new Job(conf, "nodeExtra");
		key_qualify = new Job(conf, "key_qualify");
		value_qualify = new Job(conf, "value_qualify");
		undirect = new Job(conf, "undirect");
		qualify = new Job(conf, "metis_qualify");
		key_part = new Job(conf, "key+partition");
		value_part = new Job(conf, "value+partition");

		// Stage 1: make the graph undirected and drop duplicate edges.
		undirect.setJarByClass(GraphUndirect.class);
		undirect.setMapperClass(GraphUndirect.GraphUndirectMapper.class);
		undirect.setMapOutputKeyClass(GraphMetisQualify.Pair.class);
		undirect.setMapOutputValueClass(IntWritable.class);
		undirect.setReducerClass(GraphUndirect.GraphUndirectReducer.class);
		undirect.setOutputKeyClass(Text.class);
		undirect.setOutputValueClass(Text.class);
		undirect.setNumReduceTasks(2);
		FileInputFormat.addInputPath(undirect, new Path(PATH_PREFIX + args[0]));
		FileOutputFormat.setOutputPath(undirect, new Path(PATH_PREFIX + "undirect_graph"));
		runOrDie(undirect);
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "undirect_graph/_logs");

		// Stage 2: extract the distinct node set.
		nodeExtract.setJarByClass(GraphNodeExtract.class);
		nodeExtract.setMapperClass(GraphNodeExtract.GraphNodeExtractMapper.class);
		nodeExtract.setReducerClass(GraphNodeExtract.GraphNodeExtractReducer.class);
		nodeExtract.setPartitionerClass(GraphNodeExtract.GraphNodeExtractPartioner.class);
		nodeExtract.setMapOutputKeyClass(LongWritable.class);
		nodeExtract.setMapOutputValueClass(Text.class);
		nodeExtract.setOutputKeyClass(LongWritable.class);
		nodeExtract.setOutputValueClass(Text.class);
		nodeExtract.setNumReduceTasks(2);
		FileInputFormat.addInputPath(nodeExtract, new Path(PATH_PREFIX + "undirect_graph"));
		FileOutputFormat.setOutputPath(nodeExtract, new Path(PATH_PREFIX + "nodeExtr"));
		runOrDie(nodeExtract);
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "nodeExtr/_logs");
		// Copy the extracted node list to the local filesystem ...
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -copyToLocal " + PATH_PREFIX + "nodeExtr " + LOCAL_PREFIX + "nodeExtr");
		// ... assign each node a sequential id, and push the id map back to HDFS.
		GraphUtility.GiveSig(LOCAL_PREFIX + "nodeExtr",
				LOCAL_PREFIX + "nodeExtr/nodeExtr_Merge_Sig");
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -put " + LOCAL_PREFIX + "nodeExtr/nodeExtr_Merge_Sig " + PATH_PREFIX + "undirect_graph/id");

		// Stage 3a: rewrite edge KEYS to the new ids.
		key_qualify.setJarByClass(GraphMetisQualify.class);
		key_qualify.setMapperClass(GraphMetisQualify.Key_MetisQualifyMapper.class);
		key_qualify.setReducerClass(GraphMetisQualify.Key_MetisQualifyReducer.class);
		key_qualify.setOutputKeyClass(LongWritable.class);
		key_qualify.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(key_qualify, new Path(PATH_PREFIX + "undirect_graph"));
		FileOutputFormat.setOutputPath(key_qualify, new Path(PATH_PREFIX + "key_alter"));
		key_qualify.setNumReduceTasks(2);
		runOrDie(key_qualify);
		// METIS needs "<#nodes> <#edges>" as the header line of its input file;
		// the counts come from counters maintained by the key_qualify job.
		nums = key_qualify.getCounters().findCounter(GraphMetisQualify.COUNTER.NODE).getValue() + " " +
				key_qualify.getCounters().findCounter(GraphMetisQualify.COUNTER.EDGE).getValue() + "\n";
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "key_alter/_logs");

		// Stage 3b: rewrite edge VALUES to the new ids (joins against the id map).
		value_qualify.setJarByClass(GraphMetisQualify.class);
		value_qualify.setMapperClass(GraphMetisQualify.Value_MetisQualifyMapper.class);
		value_qualify.setReducerClass(GraphMetisQualify.Value_MetisQualifyReducer.class);
		value_qualify.setOutputKeyClass(LongWritable.class);
		value_qualify.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(value_qualify, new Path(PATH_PREFIX + "key_alter"));
		FileInputFormat.addInputPath(value_qualify, new Path(PATH_PREFIX + "undirect_graph/id"));
		FileOutputFormat.setOutputPath(value_qualify, new Path(PATH_PREFIX + "value_alter"));
		value_qualify.setNumReduceTasks(2);
		runOrDie(value_qualify);
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "value_alter/_logs");

		// Stage 4: convert the renumbered graph into METIS adjacency format.
		qualify.setJarByClass(GraphMetisQualify.class);
		qualify.setMapperClass(GraphMetisQualify.MetisQualifyMapper.class);
		qualify.setReducerClass(GraphMetisQualify.MetisQualifyReducer.class);
		qualify.setPartitionerClass(GraphMetisQualify.MetisQualifyPartitioner.class);
		qualify.setMapOutputKeyClass(LongWritable.class);
		qualify.setMapOutputValueClass(LongWritable.class);
		qualify.setOutputKeyClass(LongWritable.class);
		qualify.setOutputValueClass(Text.class);
		FileInputFormat.addInputPath(qualify, new Path(PATH_PREFIX + "value_alter"));
		FileOutputFormat.setOutputPath(qualify, new Path(PATH_PREFIX + "metis"));
		qualify.setNumReduceTasks(2);
		runOrDie(qualify);
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "metis/_logs");
		// Drop intermediates and pull the METIS input down to the local disk.
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "key_alter");
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -copyToLocal " + PATH_PREFIX + "metis " + LOCAL_PREFIX + "metis");
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "metis");
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "nodeExtr");
		// Prepend the "<#nodes> <#edges>" header while merging the part files.
		GraphUtility.Merge(LOCAL_PREFIX + "metis", LOCAL_PREFIX + "metis/metis_merge", nums);

		// Stage 5: partition locally with gpmetis; it writes
		// metis_merge.part.<metis_num>.  Number the partition assignments and
		// upload them next to the renumbered graph for the join jobs below.
		GraphUtility.Exec("gpmetis " + LOCAL_PREFIX + "metis/metis_merge " + metis_num);
		GraphUtility.Exec("mkdir " + LOCAL_PREFIX + "metis/metis_for_sig");
		GraphUtility.Exec("mv " + LOCAL_PREFIX + "metis/metis_merge.part." + metis_num + " " +
				LOCAL_PREFIX + "metis/metis_for_sig/");
		GraphUtility.GiveSig(LOCAL_PREFIX + "metis/metis_for_sig",
				LOCAL_PREFIX + "metis/partition");
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -put " + LOCAL_PREFIX + "metis/partition " +
				PATH_PREFIX + "value_alter/partition");

		// Stage 6a: attach partition info to each KEY.
		key_part.setJarByClass(GraphPartSig.class);
		key_part.setMapperClass(GraphPartSig.Key_GraphSigMapper.class);
		key_part.setReducerClass(GraphPartSig.Key_GraphSigReducer.class);
		key_part.setOutputKeyClass(Text.class);
		key_part.setOutputValueClass(Text.class);
		key_part.setMapOutputKeyClass(LongWritable.class);
		key_part.setMapOutputValueClass(Text.class);
		FileInputFormat.addInputPath(key_part, new Path(PATH_PREFIX + "value_alter"));
		FileOutputFormat.setOutputPath(key_part, new Path(PATH_PREFIX + "key_part"));
		key_part.setNumReduceTasks(2);
		runOrDie(key_part);
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "key_part/_logs");

		// Stage 6b: attach partition info to each VALUE; final output is graph_part.
		value_part.setJarByClass(GraphPartSig.class);
		value_part.setMapperClass(GraphPartSig.Value_GraphSigMapper.class);
		value_part.setReducerClass(GraphPartSig.Value_GrapSigReducer.class);
		value_part.setOutputKeyClass(Text.class);
		value_part.setOutputValueClass(Text.class);
		value_part.setMapOutputKeyClass(LongWritable.class);
		value_part.setMapOutputValueClass(Text.class);
		FileInputFormat.addInputPath(value_part, new Path(PATH_PREFIX + "key_part"));
		FileInputFormat.addInputPath(value_part, new Path(PATH_PREFIX + "value_alter/partition"));
		FileOutputFormat.setOutputPath(value_part, new Path(PATH_PREFIX + "graph_part"));
		value_part.setNumReduceTasks(2);
		runOrDie(value_part);
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "graph_part/_logs");
		// Remove intermediates that are no longer needed.
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "value_alter");
		GraphUtility.Exec(HADOOP_HOME + "hadoop fs -rmr " + PATH_PREFIX + "key_part");
	}
}
