package com.hadoop.singletable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Self-join of a single "child parent" table to derive grandchild/grandparent
 * pairs. Each input line is emitted twice by the mapper (once keyed by parent,
 * once keyed by child, tagged "1"/"2"); the reducer joins the two roles on the
 * shared key.
 */
public class SingleTableJoin {

	// Guards the one-time output header line.
	// NOTE(review): a static counter only suppresses duplicates within a single
	// JVM — with more than one reducer task each task writes its own header.
	// Confirm the job always runs with one reducer, or move the header out.
	public static int time = 0;

	/*
	 * The mapper splits the line into child and parent, then emits the pair
	 * twice: keyed by parent with tag "1" (left table: key has this child),
	 * and keyed by child with tag "2" (right table: key has this parent).
	 * The tag in the value is required so the reducer can tell the two
	 * tables apart.
	 */
	public static class Map extends Mapper<LongWritable, Text, Text, Text> {

		/**
		 * Emits two tagged records per "child parent" line. The column
		 * header line (first token "child") and blank/malformed lines are
		 * skipped instead of crashing the task.
		 */
		@Override
		protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

			// \s+ tolerates repeated spaces or tabs between the two columns.
			String[] values = value.toString().trim().split("\\s+");

			// Guard: the original indexed values[1] unconditionally and threw
			// ArrayIndexOutOfBoundsException on blank or one-column lines.
			if (values.length < 2 || values[0].equals("child")) {
				return;
			}

			String childName = values[0];
			String parentName = values[1];

			// Left table: key = parent, tag "1" -> childName is a child of key.
			context.write(new Text(parentName), new Text("1" + "#" + childName + "#" + parentName));
			// Right table: key = child, tag "2" -> parentName is a parent of key.
			context.write(new Text(childName), new Text("2" + "#" + childName + "#" + parentName));
		}
	}

	/**
	 * For each person (the key), collects the children of the key from the
	 * left table and the parents of the key from the right table, then emits
	 * their cross product: every (child of key, parent of key) pair is a
	 * (grandchild, grandparent) relation.
	 */
	public static class Reduce extends Reducer<Text, Text, Text, Text> {

		@Override
		protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {

			// Emit the output header exactly once (see note on `time`).
			// FIX: header spelling corrected from "grendParent".
			if (time == 0) {
				context.write(new Text("grandChild"), new Text("grandParent"));
				time++;
			}

			// FIX: growable lists replace the fixed String[10] arrays, which
			// overflowed for any key with more than 10 children or parents.
			List<String> grandChildren = new ArrayList<String>();
			List<String> grandParents = new ArrayList<String>();

			for (Text value : values) {
				String record = value.toString();
				if (record.length() == 0) {
					continue;
				}

				// Value layout produced by the mapper: tag#child#parent.
				String[] parts = record.split("#");
				if (parts.length < 3) {
					continue; // malformed record — skip rather than crash
				}

				if (parts[0].equals("1")) {
					// Left table: parts[1] is a child of the key.
					grandChildren.add(parts[1]);
				} else {
					// Right table: parts[2] is a parent of the key.
					grandParents.add(parts[2]);
				}
			}

			// Cross product: each grandchild paired with each grandparent.
			for (String grandChild : grandChildren) {
				for (String grandParent : grandParents) {
					context.write(new Text(grandChild), new Text(grandParent));
				}
			}
		}
	}

	/**
	 * Configures and submits the job.
	 * Usage: SingleTableJoin &lt;in&gt; &lt;out&gt;
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {

		Configuration conf = new Configuration();
		String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
		if (otherArgs.length != 2) {
			System.err.println("Usage: SingleTableJoin <in> <out>");
			System.exit(2);
		}

		Job job = Job.getInstance(conf, "single table join");
		job.setJarByClass(SingleTableJoin.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		// FIX: use the parsed remaining args, not the raw args — the raw
		// array may still contain generic options (-D, -files, ...), in
		// which case args[0]/args[1] are not the input/output paths.
		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
