package com.algo.panelmn;



import java.io.DataInput;  
import java.io.DataOutput;  
import java.io.IOException;  
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;  





import java.util.Map;

import org.apache.hadoop.conf.Configuration;  
import org.apache.hadoop.conf.Configured;  
import org.apache.hadoop.fs.Path;  
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;  
import org.apache.hadoop.io.Writable;  
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
//import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.mapreduce.lib.chain.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.util.Tool;  
import org.apache.hadoop.util.ToolRunner;  














import com.supertool.tong.merger.util.MzSequenceFileCaidInputFormat;
  
public class DataJoin extends Configured implements Tool {  
	
	/**
	 * Routes records to reducers by uuid only (the timestamp half of the
	 * composite key is ignored) so every record for one uuid reaches the
	 * same reduce task.
	 */
	public static class UuidPartitioner extends Partitioner<UuidWritable, LongWritable> {
		@Override
		public int getPartition(UuidWritable key, LongWritable value, int numPartitions) {
			// BUG FIX: String.hashCode() may be negative, and a negative
			// partition index makes the MR framework fail at runtime.
			// Mask off the sign bit before taking the modulus.
			return (key.getUuid().hashCode() & Integer.MAX_VALUE) % numPartitions;
		}
	}
	public static class UuidSortComparator extends WritableComparator{
		/*public UuidSortComparator(){
			super(UuidWritable.class, true);
		}*/
		public int compare(WritableComparable comkey1, WritableComparable comkey2) {
			UuidWritable key1 = (UuidWritable)comkey1;
			UuidWritable key2 = (UuidWritable)comkey2;
			if(key1 == null || key2 == null){	System.out.println("null key here: uuidsort"); return 1;}
			int cmp;
			if((cmp = key1.compareTo(key2)) != 0)	return cmp;
			else	return key1.compareTimeTo(key2);
		}
	}
	public static class UuidGroupSort extends WritableComparator{
		/*public UuidGroupSort(){
			super(UuidWritable.class, true);
		}*/
		public int compare(WritableComparable comkey1, WritableComparable comkey2) {
			UuidWritable key1 = (UuidWritable)comkey1;
			UuidWritable key2 = (UuidWritable)comkey2;
			if(key1 == null || key2 == null){	System.out.println("null key here: uuidgroup"); return 1;}
			return key1.compareTo(key2);
		}
	}
	public static class Mappercamp extends org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, UuidWritable, Text> 
	{
		//
		UuidWritable wrt = new UuidWritable();
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String str = value.toString();
			int start = str.indexOf("plt=") + 4;
			if(start < 4) {
				System.out.println("no plt element");	return;
			}
			int end = start + 1;//str.indexOf("^",start);
			//Boolean.parseBoolean("true");
			int ispc = Integer.parseInt(str.substring(start, end));
			if(ispc == 0){//0
				start = str.indexOf("uuid=") + 5;
				end = str.indexOf("^",start);
				String uuid = str.substring(start,end);
				start = str.indexOf("ti=") + 3;
				end=str.indexOf("^",start);
				long time = Long.parseLong(str.substring(start,end));
				//context.write(new UuidWritable(uuid,time), new LongWritable(-1));
				wrt.set(uuid,time);
				if(uuid == null || time < 0 || wrt ==null) System.out.println("uuid map null"+time+wrt);
				context.write(wrt, new Text("0"));
				//-1 stands for invalid sns id
			}
			//else context.write(new UuidWritable("-100",100), new LongWritable(-100));
		}
	}
	public static class Mappersns extends org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, UuidWritable, Text> {
		//
		UuidWritable wrt = new UuidWritable();
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String str = value.toString();
			String[] sns = str.split("\\^");
			if(sns.length == 4){
				String uuid = sns[0];
				String snsid = sns[1];
				long time = Long.parseLong(sns[2]);
				wrt.set(uuid, time);
				context.write(wrt, new Text(snsid));
			}
			else	System.out.println("less sns element");
		}
	}
	public static class NoReducer extends org.apache.hadoop.mapreduce.Reducer<UuidWritable, Text, UuidWritable, Text> 
	{
		public void reduce(UuidWritable key, Iterable<Text> values, Context context)
			 throws IOException, InterruptedException {
           
			Iterator it = values.iterator();
			if(it.hasNext()){
				Text lw = (Text)it.next();
				context.write(key, lw);
				
			}
		}
	}
	public static class Reducer extends org.apache.hadoop.mapreduce.Reducer<UuidWritable, LongWritable, LongWritable, LongWritable> 
	{
		public void reduce(UuidWritable key, Iterable<LongWritable> values,Context context)
			 throws IOException, InterruptedException {
            //the same uuid was passed to the same reduce and sorted in time
			long psnsnum = 0;
			long psnsid = -1;//the prior sns id 
			//long ptaged = -1;
			Map sns = new HashMap();
			Iterator it = values.iterator();
			while(it.hasNext()){
				long lw = ((LongWritable)it.next()).get();
				if(lw > 0){
					psnsid = lw;
					sns.put(psnsid, new Counter(psnsnum));
					break;//break in the first sns id
				}
				else	psnsnum++;
			}
			while(it.hasNext()){
				long lw = ((LongWritable)it.next()).get();
				if(lw < 0){//invalid sns id stands for campaign to frag
					if(psnsid > 0){
						if(sns.containsKey(psnsid)) ((Counter)sns.get(psnsid)).num++;
						else sns.put(psnsid, new Counter(1));
					}
				}
				else	psnsid = lw;
			}
			if(sns.size() > 0){
				context.write(new LongWritable(-1), new LongWritable(1));//-1 stands for tagded sns id
				it = sns.keySet().iterator();
				while(it.hasNext()){
					long snsid = (Long) it.next();
					Counter nt = (Counter) sns.get(snsid);
					context.write(new LongWritable(snsid), new LongWritable(nt.num));
				}
			}
			else
				if(psnsid < 0)	context.write(new LongWritable(-2), new LongWritable(1));//-2 stands for no sns id tagged
		}
	}
	/*public static class LastReducer extends org.apache.hadoop.mapreduce.Reducer<LongWritable, LongWritable, LongWritable, LongWritable> {
		public void reduce(LongWritable key, Iterable<LongWritable> values,Context context)
				 throws IOException, InterruptedException {
			long snsid = key.get();
			Iterator it =values.iterator();
			long freq = 0;
			while(it.hasNext())
				freq += ((LongWritable)it.next()).get();
			if(snsid < 0){
				context.write(key, new LongWritable(freq));
			}
			else if(snsid > 0){
				while(freq > 0){
					context.write(new LongWritable(freq), new LongWritable(1));
					freq--;
				}
			}
		}
	}*/
    /**
     * Configures and runs the sns join job.
     * Current wiring: a single TextInputFormat input ({@code args[0]}) is
     * mapped by {@link Mappersns} and reduced by {@link NoReducer}, which
     * keeps the first record per (uuid, time) group. The two-input
     * MultipleInputs join and the secondary-sort comparators are disabled.
     *
     * @param args args[0] = sns input path, args[1] = output path,
     *             args[2] (optional) = second input path reserved for the
     *             disabled two-input join
     * @return 0 on success, 1 on failure (standard Tool convention)
     */
    public int run(String[] args) throws Exception {
        if (args.length < 2) {
            System.err.println("Usage: DataJoin <input> <output> [<auxInput>]");
            return 1;
        }
        Configuration conf = getConf();
        conf.set("caidfilename", "caid.txt");

        Job job = new Job(conf, "cookie");
        job.setJarByClass(DataJoin.class);

        Path p1 = new Path(args[0]);
        Path p2 = new Path(args[1]);
        // args[2] was the second input path for the disabled MultipleInputs
        // join (Mappercamp over MzSequenceFileCaidInputFormat); it is
        // accepted but unused while that path is commented out.

        FileInputFormat.setInputPaths(job, p1);
        FileOutputFormat.setOutputPath(job, p2);
        job.setInputFormatClass(TextInputFormat.class);
        job.setMapperClass(Mappersns.class);
        // Secondary sort (UuidPartitioner / UuidGroupSort / UuidSortComparator)
        // is intentionally disabled in this configuration.
        job.setReducerClass(NoReducer.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setMapOutputKeyClass(UuidWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(UuidWritable.class);
        job.setOutputValueClass(Text.class);

        // BUG FIX: waitForCompletion returns true on success, and the Tool
        // contract (consumed by System.exit in main) expects 0 for success,
        // non-zero for failure. The original returned 1 on success.
        return job.waitForCompletion(true) ? 0 : 1;
    }
    
    /** Command-line entry point: delegates to ToolRunner and exits with its status code. */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        int exitCode = ToolRunner.run(conf, new DataJoin(), args);
        System.exit(exitCode);
    }
}