package com.algo.panelmn;
import java.io.IOException;
import java.util.*;
import java.io.BufferedReader;
import java.io.FileReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class GetLogv2 extends Configured implements Tool {
	public static class Mapper extends org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, Text> 
	{
		// Reused output Writables: avoids allocating two Text objects per record
		// in the hot map loop.
		private final Text outKey = new Text();
		private final Text outValue = new Text();

		/**
		 * Parses one caret/equals-delimited log line and, when the campaign id
		 * ("^k=" field) lies in [1000000, 2000000), emits
		 * key = uuid (items[7]) and value = "tp ti ip p" (items[3,9,11,13]).
		 * Malformed records (missing terminator, non-numeric id, too few
		 * fields) are skipped instead of crashing the task.
		 */
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String str = value.toString();
			int beg = str.indexOf("^k=");
			if (beg <= 0) {
				return;
			}
			int end = str.indexOf('^', beg + 3);
			if (end < 0) {
				// "^k=" was the last field: no closing '^'. The original code
				// passed -1 to substring() here and threw
				// StringIndexOutOfBoundsException, killing the task attempt.
				return;
			}
			int caidInt;
			try {
				caidInt = Integer.parseInt(str.substring(beg + 3, end));
			} catch (NumberFormatException e) {
				return; // non-numeric campaign id -> skip the record
			}
			if (caidInt >= 1000000 && caidInt < 2000000) {
				// Example record:
				// plt=0^tp=imp^k=1000501^uuid=3E2TX0ix19r0^ti=1411143836^ip=101.89.84.50^p=100014605
				// Split on '^' and '=' yields alternating field-name/value tokens.
				String[] items = str.split("\\^|=");
				if (items.length > 13) {
					String info = items[3] + " " + items[9] + " " + items[11] + " " + items[13];
					outKey.set(items[7]);
					outValue.set(info);
					context.write(outKey, outValue);
				}
			}
		}
	}
	public static class Reducer extends org.apache.hadoop.mapreduce.Reducer<Text, Text, Text, Text> 
	{
		Set ids=new HashSet();
		public void setup(Context context) throws IOException
		{
			 FileReader fr = new FileReader("panel.txt");  //必须和上传文件名一致  
			 BufferedReader reader = new BufferedReader(fr);  
              
        		 String id = null;  
            		 while((id = reader.readLine()) != null)  
                	 ids.add(id);    
        	}  
		Random r = new Random();
		public void reduce(Text key, Iterable<Text> values,Context context)
			 throws IOException, InterruptedException 
                {
			Long min = 9999999999L,max=0L;
		        List<String> stringArray=new ArrayList<String>();
			for (Text value : values) {
				
				String str=value.toString();
				String[] items = str.split(" ");
				Long time=Long.parseLong(items[1]);
				stringArray.add(items[3]+" "+items[0]+" "+items[2]);
				if(time<min)
					min=time;
				else if(time>max)
					max=time;
			}
			if(ids.contains(key.toString())||(max-min>2*3600&&r.nextDouble()<0.015))
			{
				for(String string:stringArray)
					context.write(key, new Text(string));
			}
		}
	}
	/** CLI entry point: runs the job via ToolRunner and exits with its status. */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Configuration(), new GetLogv2(), args));
	}

	/**
	 * Configures and submits the filtering job.
	 * Usage: args[0] = input path, args[1] = output path.
	 *
	 * @return 0 on success, 1 on job failure, 2 on bad usage
	 */
	public int run(String[] args) throws Exception {

		// Fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException when paths are missing.
		if (args.length < 2) {
			System.err.println("Usage: GetLogv2 <input path> <output path>");
			return 2;
		}

		Configuration conf = getConf();

		// Job.getInstance replaces the deprecated Job(Configuration, String)
		// constructor, so the @SuppressWarnings is no longer needed.
		Job job = Job.getInstance(conf, "test");
		job.setJarByClass(GetLogv2.class);
		job.setMapperClass(Mapper.class);
		job.setReducerClass(Reducer.class);
		job.setInputFormatClass(com.supertool.tong.merger.util.MzSequenceFileInputFormat.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));

		return job.waitForCompletion(true) ? 0 : 1;
	}
}
