package com.rrd.dw.mr.gzip.bhzx;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.rrd.dw.mr.key.IntPair;
import com.rrd.dw.utils.HadoopUtils;
/**
 * 按新的逻辑口径包含垫付，计算逾期
 * @author liubaoxin
 *
 */
public class BHZXD3AdvanceMR extends Configured implements Tool {
	public static final String CRT_FLAG = "\001";
    public static final Pattern CRT_PATTERN = Pattern.compile(CRT_FLAG);
    
	public static class D3Maper extends Mapper<LongWritable, Text, IntPair, Text> {
		private IntPair ekey = new IntPair();
		private String line = "";
		private String arr[] = null;
		private int loanState = 0;

		/**
		 * Splits each \001-delimited row, classifies the loan by its status
		 * column (index 19) and emits (loanId "," loanState, termNo) -> raw row
		 * so the reducer receives each loan's periods sorted by term number.
		 */
		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, IntPair, Text>.Context context)
				throws IOException, InterruptedException {
			if (value == null) {
				return;
			}
			line = value.toString();
			arr = CRT_PATTERN.split(line);
			// The reducer's parseLine() reads columns up to index 24, so rows
			// need at least 25 fields. The old guard (< 22) let 22-24 column
			// rows through and crashed reduce with ArrayIndexOutOfBoundsException.
			if (arr == null || arr.length < 25) {
				return;
			}
			// Classify by loan_status. The original negated the PAYOFF test,
			// which contradicted the comments/counter names and made the
			// UNPAYOFF branch unreachable.
			if (arr[19].equals("PAYOFF")) {
				loanState = 1; // actually repaid in full by the borrower
				context.getCounter("loan", "act_payoff.records").increment(1);
			} else if (arr[19].equals("UNPAYOFF")) {
				// NOTE(review): per the original comment this state means
				// "not repaid by borrower, advance payment settled" — confirm
				// whether an advance-settlement column (e.g. arr[3]) should
				// also be checked here.
				loanState = 2;
				context.getCounter("loan", "advance_payoff.records").increment(1);
			} else {
				loanState = 3; // neither actually repaid nor advance-settled
				context.getCounter("loan", "not_payoff.records").increment(1);
			}

			// Composite key: first = loan id + state (partition/group key),
			// second = term number (secondary sort). arr[6] must be numeric or
			// this record fails the task with NumberFormatException.
			ekey.set(arr[1] + "," + loanState, Integer.parseInt(arr[6]));

			context.write(ekey, value);
		}
	}

	public static class D3Reduce extends Reducer<IntPair, Text, NullWritable, Text> {
		// All rows of the current reduce group (one loan id + state), in
		// term-number order thanks to the secondary sort; reused and cleared
		// on every reduce() call.
		private List<Map<String,String>> list_map= new ArrayList<Map<String,String>>(200);
		
		
		// Converts one split input row into a field-name -> value map.
		// NOTE(review): reads columns up to index 24, but the mapper only
		// rejects rows shorter than 22 fields — a 22-24 column row would throw
		// ArrayIndexOutOfBoundsException here. Confirm upstream data width.
		private Map<String,String> parseLine(String arr[] ){
			Map<String,String> map = new HashMap<String,String>(50);
			map.put("loan_key", arr[0]);
			map.put("loan_id", arr[1]);
			map.put("loan_payoff_time",arr[2] );
			map.put("advance_payoff_time",arr[3] );
			map.put("loan_amt",arr[4] );
			map.put("repay_plan_key",arr[5] );
			map.put("term_no",arr[6] );
			map.put("target_repayment_date",arr[7] );
			map.put("payoff_time",arr[8] ); 
			map.put("planned_payment",arr[9] );  
			map.put("next_due_date", arr[10]);  //next period's due date
			map.put("next_payoff_time", arr[11]);  //next period's payoff time
			map.put("last_due_date", arr[12]);  //due date of the final period
			map.put("last_period", arr[13]);  //term number of the final period
			map.put("act_repay_time", arr[14]);  //borrower's actual repayment time
			map.put("name",arr[15] ); 
			map.put("pid",arr[16] ); 
			map.put("mobile",arr[17] ); 
			map.put("period_status",arr[18] ); 
			map.put("loan_status",arr[19] ); 
			map.put("loan_platform", arr[20]);  //origination platform
			map.put("inv_loan_id", arr[21]);  //investment-side application ID
			map.put("transfer_type", arr[22]);  //debt-transfer flag
			map.put("transfer_time", arr[23]);  //transfer time
			
			map.put("dt", arr[24]);  //observation date
			
			map.put("is_true", "0");
			return map;
		}
		// Buffers the whole group in memory, repairs missing payoff times,
		// then delegates output to the shared settlement routine.
		@Override
		protected void reduce(IntPair key, Iterable<Text> values, Context context)
				throws IOException, InterruptedException {
			list_map.clear(); 
			for(Text t :values){
				this.list_map.add(parseLine(CRT_PATTERN.split(t.toString())));
				
			}
			procActPaytime();
			BHZXAdvancePayoffUtils.act_payoff(context, list_map);
		}
		
		// Fills in empty payoff_time fields for periods due on or before
		// 2018-11-30, walking the group's rows in term order:
		//  - UNPAYOFF periods are skipped entirely;
		//  - a row that already has a payoff_time becomes the new reference
		//    time (preTime) and is also back-filled into any OVER_DUE_PAYOFF
		//    rows queued earlier;
		//  - an empty IN_ADVANCE_PAYOFF/PAYOFF row inherits the PREVIOUS
		//    reference time, while an empty OVER_DUE_PAYOFF row is queued to
		//    receive the NEXT non-empty payoff time;
		//  - any other status on an empty row is treated as a data error.
		// Mutates list_map in place and also returns it.
		private List<Map<String,String>> procActPaytime() throws RuntimeException{
			String preTime="";
			String state="";
			Map<String,String> map=null;
			// indices of OVER_DUE_PAYOFF rows still awaiting a payoff time
			List<Integer> arr = new ArrayList<Integer>();
			
			for(int i=0;i<list_map.size();i++){
				map=list_map.get(i);
				state=map.get("period_status");
				if(state.equals("UNPAYOFF")){
					continue;
				}
				if(!map.get("payoff_time").equals("")){
					
					preTime=map.get("payoff_time");
					for(int j : arr){
						list_map.get(j).put("payoff_time", preTime);
					}
					arr.clear();
					
					continue;
				} 
				if(map.get("target_repayment_date").compareTo("2018-11-30")<=0){
				if(state.equals("IN_ADVANCE_PAYOFF") || state.equals("PAYOFF")) {
					map.put("payoff_time", preTime);
				}else if(state.equals("OVER_DUE_PAYOFF")){
					arr.add(i);
				}else{
					// unexpected period_status: log the offending loan, then abort the task
					System.out.println(map.get("loan_id")+","+map.get("loan_key")+","+map.get("period_status"));
					throw new RuntimeException("展期处理报错："+map.get("loan_id"));
				}
				}
			}
			return list_map;
		}
		
	}	
	
	/*
     * Custom partitioner: routes records by IntPair.first so every period of
     * the same loan/state lands in the same reducer.
     */
    public static class FirstPartitioner extends Partitioner<IntPair, Text>{
        @Override
        public int getPartition(IntPair key, Text value,int numPartitions){ 
            // Mask the sign bit instead of Math.abs: when hashCode()*127
            // overflows to Integer.MIN_VALUE, Math.abs still returns a
            // negative number and the shuffle rejects the partition index.
            return ((key.getFirst().hashCode() * 127) & Integer.MAX_VALUE) % numPartitions;
        }
    }
    
    /*
     * Grouping comparator: two map-output keys belong to the same reduce
     * group when their first (string) components match; the term number is
     * deliberately ignored so a whole loan/state arrives in one reduce() call.
     */
    @SuppressWarnings("rawtypes")
    public static class GroupingComparator extends WritableComparator{
        protected GroupingComparator(){
            super(IntPair.class, true);
        }
        
        /** Orders keys by the first component only, normalized to -1/0/1. */
        @Override
        public int compare(WritableComparable w1, WritableComparable w2){
            String left = ((IntPair) w1).getFirst();
            String right = ((IntPair) w2).getFirst();
            int cmp = left.compareTo(right);
            if (cmp == 0) {
                return 0;
            }
            return (cmp < 0) ? -1 : 1;
        }
    }
	/**
	 * Configures and runs the advance-payoff overdue job.
	 * Required parameters (via -D): param.input.path, param.out.path.
	 * Output is block-compressed gzip text; any existing output dir is deleted.
	 *
	 * @return 0 on success, 1 on job failure or missing parameters
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = getConf();
		// These must be set BEFORE Job.getInstance(conf): the Job constructor
		// copies the Configuration, so the original code's conf.set() calls
		// made after job creation were silently ignored.
		conf.set("mapreduce.output.fileoutputformat.compress", "true");
		conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK");
		conf.set("mapreduce.output.fileoutputformat.compress.codec",
				"org.apache.hadoop.io.compress.GzipCodec");
		conf.set("io.compression.codecs",
				"org.apache.hadoop.io.compress.GzipCodec");

		String inputDir = conf.get("param.input.path");
		String outDir = conf.get("param.out.path");
		if (inputDir == null || inputDir.equals("")) {
			System.out.println("请输入输入路径：param.input.path");
			return 1; // missing configuration is a failure, not a clean exit
		}
		if (outDir == null || outDir.equals("")) {
			System.out.println("请输入输出路径：param.out.path");
			return 1;
		}
		System.out.println("param.input.path=" + inputDir);
		System.out.println("param.out.path=" + outDir);

		Job job = Job.getInstance(conf);
		Path outpath = new Path(outDir);
		HadoopUtils.delete(conf, outpath); // drop stale output so the job can start
		TextInputFormat.setInputPaths(job, new Path(inputDir));
		TextOutputFormat.setOutputPath(job, outpath);
		FileOutputFormat.setCompressOutput(job, true);
		FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class); // gzip output

		job.setMapperClass(D3Maper.class);
		job.setReducerClass(D3Reduce.class);
		job.setPartitionerClass(FirstPartitioner.class);
		job.setGroupingComparatorClass(GroupingComparator.class);
		// Key/value classes must match the Mapper/Reducer declarations.
		// The original set the final output key to Text although D3Reduce is
		// declared to emit NullWritable keys, and never set the value classes.
		job.setMapOutputKeyClass(IntPair.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(Text.class);
		job.setNumReduceTasks(20);
		job.setJarByClass(this.getClass());

		int state = job.waitForCompletion(true) ? 0 : 1;
		System.out.println("error条数：" + job.getCounters().getGroup("error").findCounter("act_payoff.err").getValue());
		System.out.println("正常或提前还款条数：" + job.getCounters().getGroup("state").findCounter("normal.records").getValue());
		System.out.println("逾期条数：" + job.getCounters().getGroup("state").findCounter("overdue.records").getValue());
		System.out.println("实际结清条数：" + job.getCounters().getGroup("loan").findCounter("act_payoff.records").getValue());
		System.out.println("垫付结清条数：" + job.getCounters().getGroup("loan").findCounter("advance_payoff.records").getValue());
		System.out.println("未结清或垫付结清条数：" + job.getCounters().getGroup("loan").findCounter("not_payoff.records").getValue());

		return state;
	}
	/** CLI entry point: runs the tool via ToolRunner and exits with its status. */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new BHZXD3AdvanceMR(), args));
	}
	
	

}

