package com.rrd.dw.mr.text;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.rrd.dw.mr.key.IntPair;
import com.rrd.dw.mr.key.PairKeyASC;
import com.rrd.dw.utils.HadoopUtils;

public class AuditRefundPollingMR  extends Configured implements Tool {
	public static final String CRT_FLAG = "\001";
    public static final Pattern CRT_PATTERN = Pattern.compile(CRT_FLAG);
	
    public static class PollingMaper extends Mapper<LongWritable, Text, PairKeyASC, Text> {
        // Reused across map() calls to avoid per-record allocation (standard Hadoop pattern).
        private final PairKeyASC ekey = new PairKeyASC();
        private String[] arr = null;

        /**
         * Splits each input line on the \001 delimiter and emits a composite key
         * whose first part is fields 1-4 joined by \001 and whose second part is
         * field 7, with the original line as the value. Null or short rows are
         * silently skipped.
         */
        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, PairKeyASC, Text>.Context context)
                throws IOException, InterruptedException {
            if (value == null) {
                return;
            }
            arr = CRT_PATTERN.split(value.toString());
            // Fields arr[1]..arr[4] and arr[7] are read below, so at least 8
            // columns are required. The original check (< 6) let rows with 6 or
            // 7 columns through and threw ArrayIndexOutOfBoundsException.
            if (arr == null || arr.length < 8) {
                return;
            }
            ekey.setKey(arr[1] + CRT_FLAG + arr[2] + CRT_FLAG + arr[3] + CRT_FLAG + arr[4], arr[7]);

            context.write(ekey, value);
        }
    }

	public static class PoollingReduce extends Reducer<PairKeyASC, Text, NullWritable, Text> {
		// Buffers reused across reduce() calls (standard Hadoop object-reuse pattern).
		private String[] arr = null;
		private final Text val = new Text();
		private final StringBuilder sb = new StringBuilder();

		/**
		 * Drains a refund "pool" across the ordered records of one key group.
		 * The pool starts from field 3 of the composite key's first part; each
		 * record's refund amount (field 6) is subtracted until the pool can no
		 * longer cover a record, at which point the remainder is emitted and
		 * the group is abandoned.
		 *
		 * Output columns (\001-separated):
		 *   arr[0] \001 key-first \001 arr[5] \001 amount-applied \001
		 *   running-total \001 flag (1 = record fully covered, 0 = pool exhausted)
		 *
		 * NOTE(review): monetary amounts are handled as double; BigDecimal would
		 * avoid binary-float rounding, but double is kept here so the emitted
		 * text stays byte-compatible with existing downstream consumers.
		 */
		@Override
		protected void reduce(PairKeyASC key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			// Remaining pool for this group: 4th field of the composite key.
			double poolAmt = Double.parseDouble(CRT_PATTERN.split(key.getFirst())[3]);
			double addupRefundAmt = 0d;
			for (Text t : values) {
				sb.setLength(0);
				arr = CRT_PATTERN.split(t.toString());
				String kstr = arr[0] + CRT_FLAG + key.getFirst();
				double refund = Double.parseDouble(arr[6]); // parse once per record
				if (poolAmt - refund < 0) {
					// Pool cannot fully cover this record: emit what is left
					// (flag 0) and stop processing the group.
					addupRefundAmt = addupRefundAmt + poolAmt;
					sb.append(kstr).append(CRT_FLAG).append(arr[5]).append(CRT_FLAG).append(poolAmt).append(CRT_FLAG).append(addupRefundAmt);
					sb.append(CRT_FLAG).append("0");
					val.set(sb.toString());
					context.write(NullWritable.get(), val);
					break;
				} else {
					// Fully covered: charge the pool and emit flag 1. arr[6] is
					// appended as the original string to keep output unchanged.
					poolAmt = poolAmt - refund;
					addupRefundAmt = addupRefundAmt + refund;
					sb.append(kstr).append(CRT_FLAG).append(arr[5]).append(CRT_FLAG).append(arr[6]).append(CRT_FLAG).append(addupRefundAmt);
					sb.append(CRT_FLAG).append("1");
					val.set(sb.toString());
					context.write(NullWritable.get(), val);
				}
			}
		}
	}
	/*
	 * Custom partitioner: routes each record by the first component of the
	 * composite key, so every record sharing that component reaches the same
	 * reducer (required for the secondary-sort setup).
	 */
    public static class FirstPartitioner extends Partitioner<PairKeyASC, Text>{
        @Override
        public int getPartition(PairKeyASC key, Text value,int numPartitions){
            // Mask the sign bit instead of Math.abs(): when hashCode() * 127
            // overflows to Integer.MIN_VALUE, Math.abs() still returns a
            // negative number, so getPartition would return a negative index
            // and the job would fail with an IllegalArgumentException.
            return (key.getFirst().hashCode() * 127 & Integer.MAX_VALUE) % numPartitions;
        }
    }
    /*
     * Grouping comparator: within a partition, groups records by the first
     * component of the composite key only, so a single reduce() call sees all
     * records for that component regardless of the secondary sort field.
     */
    @SuppressWarnings("rawtypes")
    public static class GroupingComparator extends WritableComparator{
        protected GroupingComparator(){
            super(PairKeyASC.class, true);
        }

        @Override
        public int compare(WritableComparable w1, WritableComparable w2){
            // String.compareTo already returns 0 on equality and a correctly
            // signed value otherwise; the framework only inspects the sign, so
            // the original equals()-then-normalize dance was redundant.
            return ((PairKeyASC) w1).getFirst().compareTo(((PairKeyASC) w2).getFirst());
        }
    }
	/**
	 * Configures and runs the polling job. Input/output paths are taken from
	 * the configuration keys {@code param.input.path} and {@code param.out.path}.
	 *
	 * @return 0 on job success, 1 on job failure, 2 on missing parameters
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = getConf();
		Job job = Job.getInstance(conf);

		String inputDir = conf.get("param.input.path");
		String outDir = conf.get("param.out.path");
		// Fail fast with a NON-zero code on missing parameters. The original
		// called System.exit(0), which reported success to the shell/scheduler
		// even though the job never ran.
		if (inputDir == null || inputDir.equals("")) {
			System.err.println("请输入输入路径：param.input.path");
			return 2;
		}
		if (outDir == null || outDir.equals("")) {
			System.err.println("请输入输出路径：param.out.path");
			return 2;
		}
		System.out.println("param.input.path=" + inputDir);
		System.out.println("param.out.path=" + outDir);

		// Remove any stale output so the job does not abort on an existing dir.
		HadoopUtils.delete(conf, new Path(outDir));

		TextInputFormat.setInputPaths(job, new Path(inputDir));
		TextOutputFormat.setOutputPath(job, new Path(outDir));
		FileOutputFormat.setCompressOutput(job, true);

		job.setMapperClass(PollingMaper.class);
		job.setReducerClass(PoollingReduce.class);
		job.setPartitionerClass(FirstPartitioner.class);
		job.setGroupingComparatorClass(GroupingComparator.class);

		job.setMapOutputKeyClass(PairKeyASC.class);
		job.setMapOutputValueClass(Text.class);
		// The reducer emits NullWritable keys and Text values; the original
		// declared Text as the output key class, which did not match.
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);

		job.setJarByClass(this.getClass());

		return job.waitForCompletion(true) ? 0 : 1;
	}
	
public static void main(String[] args) throws Exception {
	// Delegate generic-option parsing (-D key=value etc.) to ToolRunner and
	// propagate the job's status as the process exit code.
	System.exit(ToolRunner.run(new AuditRefundPollingMR(), args));
}
}
