/**
 * 
 */
package com.rrd.hive.udaf;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;


/** 
 * @author baoxin.liu
 * @version 创建时间：2017年02月13日 上午10:44:43 
 * 根据传入的参数，形成以第一个参数为Key，第二参数为value的map数据，有相同key 值进行累计
 * 
 */
public class ActAmortizeAmtUDAF extends AbstractGenericUDAFResolver {

	/**
	 * Returns the evaluator for this UDAF.
	 *
	 * Expected arguments (exactly 5):
	 *   0: installment/period number (int; becomes the result-map key)
	 *   1: actual monthly repayment (G)
	 *   2: cumulative received-vs-receivable difference (H)
	 *   3: cumulative theoretical amortized principal (I)
	 *   4: principal ratio (K)
	 *
	 * @param parameters the type info of the call-site arguments
	 * @return the evaluator instance
	 * @throws SemanticException (as UDFArgumentTypeException) if the argument count is not 5
	 */
	@Override
	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
			throws SemanticException {
		// Only the argument count is validated here; value conversion is
		// delegated to the primitive object inspectors in the evaluator.
		if (parameters.length != 5) {
			throw new UDFArgumentTypeException(parameters.length, "输入参数不正确!");
		}
		return new AmortizeAmtEvaluator();
	}

	/**
	 * Evaluator: collects rows into a Map&lt;period, "G,H,I,K"&gt; and, at
	 * terminate time, walks the periods in ascending order computing the
	 * per-period amortized amount L = I - H*K - (sum of earlier L),
	 * returning Map&lt;period, String.valueOf(L)&gt;.
	 */
	public static class AmortizeAmtEvaluator extends GenericUDAFEvaluator {
		private PrimitiveObjectInspector P_InputOI;  // period number (map key)
		private PrimitiveObjectInspector G_InputOI1; // actual monthly repayment
		private PrimitiveObjectInspector H_InputOI2; // cumulative received-vs-receivable difference
		private PrimitiveObjectInspector I_InputOI3; // cumulative theoretical amortized principal
		private PrimitiveObjectInspector K_InputOI4; // principal ratio
		private StandardMapObjectInspector mapOI;    // inspector for the partial-result map

		@Override
		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
			super.init(m, parameters);
			if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
				// PARTIAL1 and COMPLETE both feed raw rows to iterate(), so both
				// must capture the input inspectors. (The original code skipped
				// this for COMPLETE, causing an NPE in map-side-only aggregation.)
				this.P_InputOI = (PrimitiveObjectInspector) parameters[0];
				this.G_InputOI1 = (PrimitiveObjectInspector) parameters[1];
				this.H_InputOI2 = (PrimitiveObjectInspector) parameters[2];
				this.I_InputOI3 = (PrimitiveObjectInspector) parameters[3];
				this.K_InputOI4 = (PrimitiveObjectInspector) parameters[4];
			} else if (m == Mode.PARTIAL2 || m == Mode.FINAL) {
				// PARTIAL2/FINAL receive the map emitted by terminatePartial().
				this.mapOI = (StandardMapObjectInspector) parameters[0];
			} else {
				// Unreachable: Mode has exactly the four values handled above.
				throw new RuntimeException("no such mode Exception");
			}
			// Every mode emits a map<int, string> of java objects.
			return ObjectInspectorFactory.getStandardMapObjectInspector(
					PrimitiveObjectInspectorFactory.javaIntObjectInspector,
					PrimitiveObjectInspectorFactory.javaStringObjectInspector);
		}

		/** Aggregation state: period number -> "G,H,I,K" (comma-joined raw values). */
		static class ResultAgg extends AbstractAggregationBuffer {
			Map<Integer, String> map = new HashMap<>();
		}

		@Override
		public AbstractAggregationBuffer getNewAggregationBuffer() throws HiveException {
			ResultAgg aggr = new ResultAgg();
			reset(aggr);
			return aggr;
		}

		@Override
		public void reset(AggregationBuffer agg) throws HiveException {
			((ResultAgg) agg).map = new HashMap<>();
		}

		@Override
		public void iterate(AggregationBuffer agg, Object[] parameters)
				throws HiveException {
			// Rows with a null period key are skipped; the remaining four values
			// are stored verbatim as a comma-joined string keyed by the period.
			if (parameters == null || parameters[0] == null) {
				return;
			}
			ResultAgg rag = (ResultAgg) agg;
			int key = PrimitiveObjectInspectorUtils.getInt(parameters[0], this.P_InputOI);
			String value = PrimitiveObjectInspectorUtils.getString(parameters[1], this.G_InputOI1)
					+ "," + PrimitiveObjectInspectorUtils.getString(parameters[2], this.H_InputOI2)
					+ "," + PrimitiveObjectInspectorUtils.getString(parameters[3], this.I_InputOI3)
					+ "," + PrimitiveObjectInspectorUtils.getString(parameters[4], this.K_InputOI4);
			// A later row with the same period number overwrites the earlier one.
			rag.map.put(key, value);
		}

		@Override
		public Object terminatePartial(AggregationBuffer agg)
				throws HiveException {
			// Defensive copy so subsequent buffer reuse cannot mutate the
			// already-emitted partial result.
			return new HashMap<>(((ResultAgg) agg).map);
		}

		@Override
		public void merge(AggregationBuffer agg, Object partial)
				throws HiveException {
			// partial is null when a task produced no input rows.
			if (partial == null) {
				return;
			}
			ResultAgg aggr = (ResultAgg) agg;
			Map<?, ?> partialResult = this.mapOI.getMap(partial);
			for (Map.Entry<?, ?> entry : partialResult.entrySet()) {
				// toString() tolerates both lazy/writable and java objects.
				aggr.map.put(Integer.parseInt(entry.getKey().toString()),
						entry.getValue().toString());
			}
		}

		@Override
		public Object terminate(AggregationBuffer agg) throws HiveException {
			ResultAgg aggr = (ResultAgg) agg;
			if (aggr.map == null || aggr.map.isEmpty()) {
				return null;
			}
			Map<Integer, String> mp = aggr.map;
			// The declared output inspector is javaStringObjectInspector, so the
			// values must be java.lang.String. (The original wrapped them in
			// org.apache.hadoop.io.Text, which does not match the inspector and
			// breaks at serialization time.)
			Map<Integer, String> result = new HashMap<>();
			final int maxPeriods = 10000; // safety cap on the number of periods
			double cumulative = 0;        // running total of amortized amounts ("s")
			for (int period = 1; period < maxPeriods; period++) {
				String row = mp.get(period);
				if (row == null) {
					// Periods are assumed contiguous starting at 1; stop at the first gap.
					break;
				}
				String[] arr = row.split(",");
				double g = Double.parseDouble(arr[0]); // actual monthly repayment
				double h = Double.parseDouble(arr[1]); // cumulative received-vs-receivable diff
				double i = Double.parseDouble(arr[2]); // cumulative theoretical amortized principal
				double k = Double.parseDouble(arr[3]); // principal ratio
				double l;
				if (g == 0) {
					// No actual repayment this period: nothing is amortized and
					// the running total stays unchanged.
					l = 0;
				} else {
					// Amortize up to the cumulative target (i - h*k), net of what
					// earlier periods already amortized.
					l = i - h * k - cumulative;
					cumulative += l;
				}
				result.put(period, String.valueOf(l));
			}
			return result;
		}

	}

}
