package com.test.conf;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.apache.hadoop.contrib.utils.join.DataJoinMapperBase;
import org.apache.hadoop.contrib.utils.join.DataJoinReducerBase;
import org.apache.hadoop.contrib.utils.join.TaggedMapOutput;

public class DataJoin2 extends Configured implements Tool {
    
    public static class MapClass extends DataJoinMapperBase {

        /**
         * Tags every record with the name of the file it came from, so the
         * reducer can tell which data source each value belongs to.
         * (Leftover debug println calls removed; generateInputTag is invoked
         * during task setup, when this.inputFile may not yet be populated.)
         */
        protected Text generateInputTag(String inputFile) {
            return new Text(inputFile);
        }

        /**
         * Wraps the raw map input value in a TaggedWritable carrying the
         * current input tag (the source file name).
         */
        protected TaggedMapOutput generateTaggedMapOutput(Object value) {
            TaggedWritable retv = new TaggedWritable();
            retv.setData((Text) value);
            retv.setTag(this.inputTag);
            return retv;
        }

        /**
         * Uses the first comma-separated field of the record as the join key;
         * records from all sources that share this key meet in one reduce group.
         */
        protected Text generateGroupKey(TaggedMapOutput aRecord) {
            String line = ((Text) aRecord.getData()).toString();
            String[] tokens = line.split(",");
            return new Text(tokens[0]);
        }
    }
    
    public static class Reduce extends DataJoinReducerBase {

        /**
         * Combines one tagged record from each source into a single joined
         * CSV record for the current group key.
         *
         * @param tags   the distinct source tags present for this key
         * @param values one TaggedWritable per source (parallel to tags)
         * @return the joined record, or null to drop the key (inner join)
         */
        protected TaggedMapOutput combine(Object[] tags, Object[] values) {
            // Inner-join semantics: discard keys that do not appear in at
            // least two sources.
            if (tags.length < 2) return null;
            StringBuilder joined = new StringBuilder();
            for (int i = 0; i < values.length; i++) {
                if (i > 0) joined.append(",");
                TaggedWritable tw = (TaggedWritable) values[i];
                String line = ((Text) tw.getData()).toString();
                // Split off the leading join-key field; keep the remainder
                // of the record intact (limit 2 preserves embedded commas).
                String[] tokens = line.split(",", 2);
                joined.append(tokens[1]);
            }
            TaggedWritable retv = new TaggedWritable();
            retv.setData(new Text(joined.toString()));
            // Bug fix: the tag must be set. The framework serializes this
            // output via TaggedWritable.write(), which calls tag.toString();
            // returning it with a null tag throws a NullPointerException.
            retv.setTag((Text) tags[0]);
            return retv;
        }
    }
    
    public static class TaggedWritable extends TaggedMapOutput {

        // Payload record; the tag itself lives in the TaggedMapOutput base class.
        private Writable data;

        public void setData(Writable data) {
            this.data = data;
        }

        public Writable getData() {
            return this.data;
        }

        /**
         * Serializes tag then data as modified-UTF strings. Note that
         * DataOutput.writeUTF caps each string at 64KB of encoded bytes.
         */
        public void write(DataOutput out) throws IOException {
            String tagText = this.tag.toString();
            String dataText = this.data.toString();
            out.writeUTF(tagText);
            out.writeUTF(dataText);
        }

        /**
         * Mirror of write(): restores both fields, materializing each as Text.
         */
        public void readFields(DataInput in) throws IOException {
            String tagText = in.readUTF();
            String dataText = in.readUTF();
            this.tag = new Text(tagText);
            this.data = new Text(dataText);
        }
    }
    
    /**
     * Configures and launches the data-join job.
     *
     * @param args optional paths: args[0] = input, args[1] = output; falls
     *             back to the original hard-coded "abc"/"output" when absent
     * @return 0 on successful job completion
     * @throws Exception if the job fails
     */
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();

        // Bug fix: the JobConf must reference THIS class (DataJoin2, not
        // DataJoin) so Hadoop ships the jar containing these mapper/reducer
        // classes to the cluster.
        JobConf job = new JobConf(conf, DataJoin2.class);

        // Honor command-line paths when supplied; keep the old literals as
        // backward-compatible defaults.
        Path in = new Path(args.length > 0 ? args[0] : "abc");
        Path out = new Path(args.length > 1 ? args[1] : "output");
        FileInputFormat.setInputPaths(job, in);
        FileOutputFormat.setOutputPath(job, out);

        job.setJobName("DataJoin");
        job.setMapperClass(MapClass.class);
        job.setReducerClass(Reduce.class);

        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(TaggedWritable.class);
        // Emit key,value separated by a comma so output stays CSV-shaped.
        job.set("mapred.textoutputformat.separator", ",");

        JobClient.runJob(job);
        return 0;
    }
    
    /**
     * Entry point: delegates generic-option parsing and configuration
     * plumbing to ToolRunner, then exits with the job's status code.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        int exitCode = ToolRunner.run(conf, new DataJoin2(), args);
        System.exit(exitCode);
    }
}
