package edu.npu.GraphTransform;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import edu.npu.GraphTransform.GraphMetisQualify.Pair;

/**
 * MapReduce job that converts a directed edge list into an undirected,
 * deduplicated edge list ("数据变成无向图").
 *
 * <p>Input: one edge per line, "&lt;src&gt;\t&lt;dst&gt;" with numeric vertex ids.
 * Output: one line per undirected edge, "&lt;min&gt;\t&lt;max&gt;", each edge exactly once.
 */
public class GraphUndirect {
	private static final String SEPARATOR = "\t";

	/**
	 * Emits each edge with its endpoints in canonical (ascending) order so that
	 * (a,b) and (b,a) produce the same {@code Pair} key and collapse to a single
	 * reduce group. Self-loops (a,a) pass through unchanged.
	 */
	public static class GraphUndirectMapper extends Mapper<LongWritable, Text, Pair, IntWritable> {
		// Hadoop idiom: reuse the writable instead of allocating one per record.
		private static final IntWritable ONE = new IntWritable(1);

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] splits = value.toString().split(SEPARATOR);
			// Parse each endpoint exactly once (the original parsed both twice).
			long a = Long.parseLong(splits[0]);
			long b = Long.parseLong(splits[1]);
			// Canonical order: smaller id first, so both directions share one key.
			context.write(
					new Pair(
							new LongWritable(Math.min(a, b)),
							new LongWritable(Math.max(a, b))),
					ONE);
		}
	}

	/**
	 * Duplicate edges arrive grouped under the same key; writing the key once per
	 * group deduplicates them. The value is {@code null} so the output format
	 * emits only the key text (no trailing separator).
	 */
	public static class GraphUndirectReducer extends Reducer<Pair, IntWritable, Text, Text> {
		@Override
		protected void reduce(Pair key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			// 相同的边作为key被去重 — identical edges share a key and are written once.
			context.write(new Text(key.v1.toString() + SEPARATOR + key.v2.toString()), null);
		}
	}
}