package com.hadoop.datajoin;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.contrib.utils.join.DataJoinMapperBase;
import org.apache.hadoop.contrib.utils.join.DataJoinReducerBase;
import org.apache.hadoop.contrib.utils.join.TaggedMapOutput;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class DataJoin extends Configured{

	/**
	 * Mapper that tags each input record with its data source.
	 *
	 * <p>The source tag is derived from the input file name (the text before
	 * the first '-'), and the join key is the first comma-separated field of
	 * each record line.
	 */
	public static class MapClass extends DataJoinMapperBase{

		/** Extracts the join key: the first comma-separated field of the record. */
		@Override
		protected Text generateGroupKey(TaggedMapOutput aRecord) {
			String record = aRecord.getData().toString();
			String groupKey = record.split(",")[0];
			return new Text(groupKey);
		}

		/** Derives the source tag from the file name: everything before the first '-'. */
		@Override
		protected Text generateInputTag(String inputFile) {
			return new Text(inputFile.split("-")[0]);
		}

		/** Wraps the raw line in a TaggedWritable carrying this source's tag. */
		@Override
		protected TaggedMapOutput generateTaggedMapOutput(Object value) {
			TaggedWritable tagged = new TaggedWritable((Text) value);
			tagged.setTag(this.inputTag);
			return tagged;
		}

	}
	
	/**
	 * Reducer that performs an inner join across data sources sharing a group key.
	 *
	 * <p>For each key, one record per source is combined by stripping the
	 * record's key (its first comma-separated field) and concatenating the
	 * remainders with commas.
	 */
	public static class ReduceClass extends DataJoinReducerBase{

		/**
		 * Joins one record from each source into a single output record.
		 *
		 * @param tags   the source tags present for this group key
		 * @param values one {@code TaggedWritable} per source for this key
		 * @return the joined record, or {@code null} to drop keys that do not
		 *         appear in at least two sources (inner-join semantics)
		 */
		@Override
		protected TaggedMapOutput combine(Object[] tags, Object[] values) {
			// Inner join: skip keys missing from one of the sources.
			if(tags.length<2)
			  return null;

			// StringBuilder instead of repeated String '+=' (avoids O(n^2) copying).
			StringBuilder joined = new StringBuilder();
			for (int i = 0; i < values.length; i++) {
				if (i > 0) {
					joined.append(',');
				}
				TaggedWritable tw = (TaggedWritable) values[i];
				// Limit 2 splits off only the key, keeping commas inside the payload.
				String[] tokens = tw.getData().toString().split(",", 2);
				// Guard records that consist of a bare key with no payload
				// (original tokens[1] access threw ArrayIndexOutOfBoundsException).
				joined.append(tokens.length > 1 ? tokens[1] : "");
			}
			TaggedWritable retv = new TaggedWritable(new Text(joined.toString()));
			retv.setTag((Text) tags[0]);
			return retv;
		}

	}
	
	/**
	 * A {@link Writable} pairing a record payload with its data-source tag.
	 *
	 * <p>Hadoop instantiates Writable values reflectively during the shuffle,
	 * which requires a public no-argument constructor. The original class
	 * lacked one, so deserialization failed at runtime; additionally
	 * {@link #readFields} would have thrown a NullPointerException on an
	 * instance whose {@code data} was never initialized. Payloads handled by
	 * this job are always {@link Text} lines, so the no-arg path creates an
	 * empty {@code Text} to read into.
	 */
	public static class TaggedWritable extends TaggedMapOutput{

		private Writable data;

		/** Required by Hadoop's reflective Writable instantiation. */
		public TaggedWritable(){
			this(new Text(""));
		}

		/** Wraps {@code data} with an (initially empty) tag. */
		public TaggedWritable(Writable data){
			this.tag=new Text("");
			this.data=data;
		}

		/** Serializes the tag followed by the payload. */
		@Override
		public void write(DataOutput out) throws IOException {
			this.tag.write(out);
			this.data.write(out);
		}

		/** Deserializes the tag then the payload, mirroring {@link #write}. */
		@Override
		public void readFields(DataInput in) throws IOException {
			this.tag.readFields(in);
			if (this.data == null) {
				// Defensive: ensure a payload instance exists to read into.
				this.data = new Text("");
			}
			this.data.readFields(in);
		}

		/** @return the wrapped record payload */
		@Override
		public Writable getData() {
			return this.data;
		}
	}
	
	
	/**
	 * Configures and runs the data-join job.
	 *
	 * <p>Uses the old {@code org.apache.hadoop.mapred} API because the contrib
	 * datajoin base classes ({@code DataJoinMapperBase},
	 * {@code DataJoinReducerBase}) implement the old-API Mapper/Reducer
	 * interfaces and cannot be registered on a new-API {@code mapreduce.Job}
	 * (which is why the original {@code setMapperClass} calls were commented
	 * out and the job was never submitted).
	 *
	 * @param args args[0] = input path, args[1] = output path
	 * @throws IOException if job submission or execution fails
	 */
	public static void main(String[] args) throws IOException {
		if (args.length < 2) {
			System.err.println("Usage: DataJoin <input path> <output path>");
			System.exit(2);
		}

		Configuration conf = new Configuration();

		JobConf job = new JobConf(conf, DataJoin.class);
		job.setJobName("data join");
		// Emit joined fields separated by commas rather than the default tab.
		job.set("mapred.textoutputformat.separator", ",");

		// Fully qualified old-API formats: the file also imports the new-API
		// classes of the same simple names.
		org.apache.hadoop.mapred.FileInputFormat.setInputPaths(job, new Path(args[0]));
		org.apache.hadoop.mapred.FileOutputFormat.setOutputPath(job, new Path(args[1]));

		job.setMapperClass(MapClass.class);
		job.setReducerClass(ReduceClass.class);

		job.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class);
		job.setOutputFormat(org.apache.hadoop.mapred.TextOutputFormat.class);

		job.setOutputKeyClass(Text.class);
		// Map output values are TaggedWritable, not Text.
		job.setOutputValueClass(TaggedWritable.class);

		// Submit, block until completion, and exit non-zero on failure.
		RunningJob running = JobClient.runJob(job);
		System.exit(running.isSuccessful() ? 0 : 1);
	}
	
}
