/**
 * 
 */
package com.rrd.hive.udaf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.*;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.IntWritable;

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


/**
 * Column-profiling UDAF.
 *
 * <p>Given a single input column, returns a {@code Map<String,String>} describing it:
 * an inferred value type ("num" or "str"), the count and ratio of null-like values
 * (null, '', 'none', ...), the most frequent value with its count and ratio, and the
 * total row count.
 *
 * @author baoxin.liu
 * @version created 2022-10-14 10:44:43
 */
public class FieldCountRateUDAF extends AbstractGenericUDAFResolver {

    /** Matches signed integer/decimal literals (with an optional exponent-like tail). */
    public static final Pattern pattern = Pattern.compile("-?[0-9]+(\\.[0-9]+E?[0-9]*)?");

    /**
     * Validates the argument list and returns the evaluator.
     *
     * @param parameters type info of the call-site arguments; exactly one is expected
     * @throws SemanticException ({@link UDFArgumentTypeException}) if the argument count is not 1
     */
    @Override
    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
            throws SemanticException {
        if (parameters.length != 1) {
            throw new UDFArgumentTypeException(parameters.length, "输入参数不正确!");
        }
        return new AmortizeAmtEvaluator();
    }

    /** Evaluator: counts occurrences per value, then summarizes at FINAL/COMPLETE. */
    public static class AmortizeAmtEvaluator extends GenericUDAFEvaluator {

        /** Inspector for the raw column value (set in PARTIAL1 / COMPLETE). */
        private PrimitiveObjectInspector P_InputOI;

        /** Inspector for the incoming partial count map (set in PARTIAL2 / FINAL). */
        private StandardMapObjectInspector mapOI;

        /**
         * Configures the input inspector for the mode and declares the output shape:
         * map(string, int) for partial results, map(string, string) for the final result.
         */
        @Override
        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
            super.init(m, parameters);
            if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
                // Raw rows come in: remember how to read the single input column.
                this.P_InputOI = (PrimitiveObjectInspector) parameters[0];
            } else if (m == Mode.PARTIAL2 || m == Mode.FINAL) {
                // Partial count maps come in.
                this.mapOI = (StandardMapObjectInspector) parameters[0];
            } else {
                throw new RuntimeException("no such mode Exception");
            }
            if (m == Mode.PARTIAL1 || m == Mode.PARTIAL2) {
                // Intermediate output: value -> occurrence count.
                return ObjectInspectorFactory.getStandardMapObjectInspector(
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                        PrimitiveObjectInspectorFactory.javaIntObjectInspector);
            }
            // FINAL / COMPLETE output: summary statistics rendered as strings.
            return ObjectInspectorFactory.getStandardMapObjectInspector(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        }

        /** Aggregation buffer: value (or the "null_counter" bucket) -> occurrence count. */
        static class ResultAgg extends AbstractAggregationBuffer {
            Map<String, Integer> map = new HashMap<>();
        }

        /** Creates a fresh, reset buffer. */
        @Override
        public AbstractAggregationBuffer getNewAggregationBuffer() throws HiveException {
            AbstractAggregationBuffer aggr = new ResultAgg();
            this.reset(aggr);
            return aggr;
        }

        /** Clears the accumulated counts. */
        @Override
        public void reset(AggregationBuffer agg) throws HiveException {
            ((ResultAgg) agg).map = new HashMap<>();
        }

        /** Counts one input row, bucketing null-like values under "null_counter". */
        @Override
        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
            ResultAgg rag = (ResultAgg) agg;
            String key = PrimitiveObjectInspectorUtils.getString(parameters[0], this.P_InputOI);
            // Treat null, "null", blank, and "none" (case-insensitive) as missing values.
            if (key == null
                    || key.equalsIgnoreCase("null")
                    || key.trim().isEmpty()
                    || key.trim().equalsIgnoreCase("none")) {
                key = "null_counter";
            }
            rag.map.merge(key, 1, Integer::sum);
        }

        /** Folds a partial count map into the buffer. */
        @Override
        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
            if (partial == null) {
                return;
            }
            ResultAgg aggr = (ResultAgg) agg;
            Map<?, ?> partialResult = this.mapOI.getMap(partial);
            PrimitiveObjectInspector valueOI =
                    (PrimitiveObjectInspector) this.mapOI.getMapValueObjectInspector();
            for (Map.Entry<?, ?> en : partialResult.entrySet()) {
                // Read the count through the value OI instead of casting to IntWritable,
                // so both Java Integer and writable representations are accepted.
                int count = PrimitiveObjectInspectorUtils.getInt(en.getValue(), valueOI);
                aggr.map.merge(en.getKey().toString(), count, Integer::sum);
            }
        }

        /** Emits a defensive copy of the current counts as the partial result. */
        @Override
        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
            return new HashMap<>(((ResultAgg) agg).map);
        }

        /**
         * Builds the final summary from the merged counts. Returned keys:
         * "total_counter", "null_counter" (when present), "null_rat", "max_counter_key",
         * "max_counter_val", "max_rat", "colum_type" ("num"/"str"), and "type_value"
         * (the first non-numeric sample seen when the column is classified as "str").
         * Returns null when no rows were aggregated.
         */
        @Override
        public Object terminate(AggregationBuffer agg) throws HiveException {
            ResultAgg aggr = (ResultAgg) agg;
            if (aggr.map == null || aggr.map.isEmpty()) {
                return null;
            }
            int total = 0;
            int nullCounter = 0;
            Map.Entry<String, Integer> maxEntry = null;
            String columnType = "num";
            Map<String, String> result = new HashMap<>();
            for (Map.Entry<String, Integer> en : aggr.map.entrySet()) {
                total += en.getValue();
                if (en.getKey().equals("null_counter")) {
                    // Null-like values are reported separately and never compete
                    // for the most-frequent slot or affect the type inference.
                    nullCounter = en.getValue();
                    result.put(en.getKey(), String.valueOf(en.getValue()));
                    continue;
                }
                if (columnType.equals("num") && !pattern.matcher(en.getKey()).matches()) {
                    // First non-numeric sample flips the column type to "str".
                    result.put("type_value", en.getKey());
                    columnType = "str";
                }
                if (maxEntry == null || en.getValue() > maxEntry.getValue()) {
                    maxEntry = en;
                }
            }
            result.put("total_counter", String.valueOf(total));
            if (maxEntry != null) {
                result.put("max_counter_key", maxEntry.getKey());
                result.put("max_counter_val", String.valueOf(maxEntry.getValue()));
                result.put("max_rat", String.valueOf(1.0 * maxEntry.getValue() / total));
                // NOTE: "colum_type" (sic) is kept as-is — renaming the key would
                // break downstream consumers of this map.
                result.put("colum_type", columnType);
            }
            result.put("null_rat", String.valueOf(1.0 * nullCounter / total));
            return result;
        }
    }
}
