package edu.npu.GraphRedundance;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import edu.npu.GraphTransform.GraphSlim;

/**
 * Driver for the third redundancy round of the graph expansion pipeline.
 *
 * <p>Runs two chained MapReduce jobs:
 * <ol>
 *   <li>"triHop" — joins the partitioned graph ({@code graph_part}) with the
 *       two-hop edge list ({@code twoHop}) to emit three-hop edges keyed by
 *       block id, written to {@code triHop};</li>
 *   <li>"triSlim" — runs {@link GraphSlim} over {@code twoHop_slim} plus the
 *       fresh {@code triHop} output to produce the slimmed {@code triHop_slim}
 *       edge list.</li>
 * </ol>
 */
public class TriHopRedundanceDriver {
	/** Field separator used in all input and output records. */
	final static String SEPARATOR = "\t";
	public static final String USERNAME = "zhao";
	public static final String PATH_PREFIX = "hdfs://test118:9000/user/" + USERNAME + "/";

	/**
	 * Mapper over two heterogeneous inputs, distinguished by the presence of
	 * a '#' in the line:
	 * <ul>
	 *   <li>two-hop records whose first two fields are {@code vertex#block}:
	 *       when both endpoints carry the same block id, emit the edge in both
	 *       directions keyed by vertex id;</li>
	 *   <li>otherwise a three-field record — emit field 3 as the key and
	 *       fields 1 and 2 (tab-joined) as the value.
	 *       (NOTE(review): presumably {@code graph_part} rows are
	 *       {@code block<TAB>vertex<TAB>target} — confirm against the
	 *       producing job.)</li>
	 * </ul>
	 */
	public static class TriHopRedundanceMapper extends Mapper<LongWritable, Text, Text, Text> {
		// Reused output writables — avoids two allocations per emitted record.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String line = value.toString();
			String[] splits = line.split(SEPARATOR);

			if (line.contains("#")) {
				String[] kParts = splits[0].split("#");
				String[] vParts = splits[1].split("#");
				// Only vertices belonging to the same block are connected by
				// the two-hop expansion; emit the edge symmetrically.
				if (kParts[1].equals(vParts[1])) {
					outKey.set(kParts[0]);
					outValue.set(vParts[0]);
					context.write(outKey, outValue);
					outKey.set(vParts[0]);
					outValue.set(kParts[0]);
					context.write(outKey, outValue);
				}
			} else {
				outKey.set(splits[2]);
				outValue.set(splits[0] + SEPARATOR + splits[1]);
				context.write(outKey, outValue);
			}
		}
	}

	/**
	 * Reducer that cross-joins, per vertex key, the set of block ids (from
	 * multi-field values, first field) with the list of neighbor vertex ids
	 * (single-field values), emitting {@code block -> key<TAB>neighbor}.
	 */
	public static class TriHopRedundanceReducer extends Reducer<Text, Text, Text, Text> {
		@Override
		protected void reduce(Text key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			List<Long> neighbors = new ArrayList<Long>();
			// PERF FIX: the original deduplicated blocks with List#contains,
			// an O(n) scan per value (O(n^2) per key). A LinkedHashSet gives
			// O(1) dedup while preserving insertion order, so the emitted
			// output is byte-identical.
			Set<Integer> blocks = new LinkedHashSet<Integer>();
			for (Text value : values) {
				String v = value.toString();
				// Split once per value (the original split each value twice).
				String[] parts = v.split(SEPARATOR);
				if (parts.length == 1) {
					neighbors.add(Long.parseLong(v));
				} else {
					blocks.add(Integer.parseInt(parts[0]));
				}
			}
			for (Integer block : blocks) {
				for (Long neighbor : neighbors) {
					context.write(new Text(block.toString()),
							new Text(key + SEPARATOR + neighbor));
				}
			}
		}
	}

	/**
	 * Runs the "triHop" join job, then the "triSlim" slimming job.
	 *
	 * @param args unused; all paths are fixed under {@link #PATH_PREFIX}
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
		Configuration conf = new Configuration();
		Job job = new Job(conf, "triHop");
		job.setJarByClass(TriHopRedundanceDriver.class);
		job.setMapperClass(TriHopRedundanceDriver.TriHopRedundanceMapper.class);
		job.setReducerClass(TriHopRedundanceDriver.TriHopRedundanceReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setNumReduceTasks(12);
		FileInputFormat.addInputPath(job, new Path(PATH_PREFIX + "graph_part"));
		FileInputFormat.addInputPath(job, new Path(PATH_PREFIX + "twoHop"));
		FileOutputFormat.setOutputPath(job, new Path(PATH_PREFIX + "triHop"));
		// BUG FIX: the original ignored the success flag, so the slim job ran
		// against missing/partial "triHop" output even when this job failed.
		if (!job.waitForCompletion(true)) {
			System.exit(1);
		}

		Job slim = new Job(conf, "triSlim");
		slim.setJarByClass(GraphSlim.class);
		slim.setMapperClass(GraphSlim.GraphSlimMapper.class);
		slim.setReducerClass(GraphSlim.GraphSlimReducer.class);
		slim.setOutputKeyClass(Text.class);
		slim.setOutputValueClass(Text.class);
		slim.setNumReduceTasks(12);
		FileInputFormat.addInputPath(slim, new Path(PATH_PREFIX + "twoHop_slim"));
		FileInputFormat.addInputPath(slim, new Path(PATH_PREFIX + "triHop"));
		FileOutputFormat.setOutputPath(slim, new Path(PATH_PREFIX + "triHop_slim"));
		// Propagate the final job's status as the process exit code.
		System.exit(slim.waitForCompletion(true) ? 0 : 1);
	}
}
