package com.udf.hive.udaf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.*;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.LongWritable;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Custom UDAF aggregate function.
 * <p>
 * Requirement:
 * input:
 * a1:5 c1:10
 * a2:3 c2:40
 * a3:8 c3:100
 * output:
 * (a1 * c1 + a2 * c2 + a3 * c3) / (c1 + c2 + c3), i.e. a weighted average
 *
 * @author huadong.qi
 * @date 2020/11/11
 */
public class UDAFDemo1 extends AbstractGenericUDAFResolver {

    /**
     * 参数个数
     */
    private static final int PARAM_COUNT = 2;

    /**
     * Resolves the evaluator for the given argument types.
     * <p>
     * Accepts exactly two arguments (value, weight), each of which must be a
     * primitive INT or LONG — both are read as longs in the evaluator's
     * {@code iterate}, so both must be validated here (the original only
     * checked the first argument, deferring failures to runtime).
     *
     * @param info the type of each call-site argument
     * @return a new {@link GenericUDAFAverageEvaluator}
     * @throws SemanticException if the argument count or any argument type is invalid
     */
    @Override
    public GenericUDAFEvaluator getEvaluator(TypeInfo[] info) throws SemanticException {
        // Validate the argument count.
        if (info.length != PARAM_COUNT) {
            throw new UDFArgumentTypeException(info.length - 1,
                    "Exactly two arguments are expected.");
        }

        // Validate every argument, not just the first.
        for (int i = 0; i < PARAM_COUNT; i++) {
            if (info[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
                throw new UDFArgumentTypeException(i,
                        "Only primitive type arguments are accepted but "
                                + info[i].getTypeName() + " is passed.");
            }
            switch (((PrimitiveTypeInfo) info[i]).getPrimitiveCategory()) {
                case INT:
                case LONG:
                    break;
                default:
                    throw new UDFArgumentTypeException(i,
                            "Only int or long type arguments are accepted but "
                                    + info[i].getTypeName() + " is passed.");
            }
        }
        return new GenericUDAFAverageEvaluator();
    }

    public static class GenericUDAFAverageEvaluator extends GenericUDAFEvaluator {
        /**
         * State shared across evaluator phases.
         * inputOI1/inputOI2 describe the raw input columns (value, weight);
         * they only declare input types and are unrelated to the computation itself.
         * fullAggregationResult holds the final aggregate value emitted in the
         * FINAL/COMPLETE phases.
         */
        private PrimitiveObjectInspector inputOI1;
        private PrimitiveObjectInspector inputOI2;
        private LongWritable fullAggregationResult;

        /**
         * Output buffer for terminatePartial: a struct {count, sum}.
         */
        Object[] partialAggregationResult;

        /**
         * Inspectors used by merge() to decode the partial-result struct.
         */
        StructObjectInspector soi;
        StructField countField;
        StructField sumField;
        LongObjectInspector countFieldOI;
        LongObjectInspector sumFieldOI;

        /**
         * Called first in every phase; wires up input and output object inspectors.
         * <p>
         * Mode lifecycle:
         * PARTIAL1 : map side (raw data -> partial aggregate)            calls init -> iterate -> terminatePartial
         * PARTIAL2 : combiner (partial aggregate -> partial aggregate)   calls init -> merge -> terminatePartial
         * FINAL    : reduce side (partial aggregate -> full aggregate)   calls init -> merge -> terminate
         * COMPLETE : map-only (raw data directly to full aggregate)      calls init -> iterate -> terminate
         *
         * @param m          current execution mode, determines input/output shapes
         * @param parameters input OIs: the raw columns (PARTIAL1/COMPLETE) or the
         *                   partial-result struct (PARTIAL2/FINAL)
         * @return the OI describing this phase's output
         */
        @Override
        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
            super.init(m, parameters);

            // PARTIAL1/COMPLETE receive the original two columns (value, weight).
            if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
                inputOI1 = (PrimitiveObjectInspector) parameters[0];
                inputOI2 = (PrimitiveObjectInspector) parameters[1];
            } else {
                // PARTIAL2/FINAL receive the struct emitted by terminatePartial;
                // capture the struct OI so merge() can decode it.
                soi = (StructObjectInspector) parameters[0];
                countField = soi.getStructFieldRef("count");
                sumField = soi.getStructFieldRef("sum");
                // Each struct member needs its own primitive OI for decoding.
                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
                sumFieldOI = (LongObjectInspector) sumField.getFieldObjectInspector();
            }
            // Declare the output type of this phase.
            if (m == Mode.PARTIAL1 || m == Mode.PARTIAL2) {
                partialAggregationResult = new Object[2];
                partialAggregationResult[0] = new LongWritable(0);
                partialAggregationResult[1] = new LongWritable(0);
                /*
                 * Build the struct OI describing the partial-result array;
                 * it is constructed from a field-name list and a field-type list.
                 */
                List<String> fname = new ArrayList<>();
                fname.add("count");
                fname.add("sum");
                List<ObjectInspector> foi = new ArrayList<>();
                // NOTE: these two OIs describe the two slots of partialAggregationResult,
                // so they must stay consistent with the LongWritable values above.
                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
            } else {
                // FINAL/COMPLETE emit a single long value.
                fullAggregationResult = new LongWritable(0);
                return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
            }
        }

        /**
         * In-memory accumulator for one group: the running weight total and the
         * running weighted sum (see iterate(): sum accumulates value * weight).
         */
        static class MyAggregationBuffer extends AbstractAggregationBuffer {
            // total of the weight column (c1 + c2 + ...)
            long count;
            // total of value * weight (a1*c1 + a2*c2 + ...)
            long sum;
        }

        /**
         * Allocates a fresh aggregation buffer, initialized via {@link #reset}.
         *
         * @return a zeroed {@link MyAggregationBuffer}
         */
        @Override
        public AbstractAggregationBuffer getNewAggregationBuffer() throws HiveException {
            final MyAggregationBuffer buffer = new MyAggregationBuffer();
            reset(buffer);
            return buffer;
        }

        /**
         * Resets the given buffer so it can be reused for a new group.
         *
         * @param agg the buffer to reset (a {@link MyAggregationBuffer})
         */
        @Override
        public void reset(AggregationBuffer agg) throws HiveException {
            // Bug fix: the original allocated a brand-new buffer and zeroed that
            // one, leaving the caller's buffer untouched (it only worked by luck
            // because Java zero-initializes fields). Reset the buffer we were given.
            MyAggregationBuffer buffer = (MyAggregationBuffer) agg;
            buffer.count = 0;
            buffer.sum = 0;
        }

        /**
         * Consumes one raw input row: parameters[0] is the value, parameters[1]
         * the weight. Rows with a NULL in either column are skipped.
         *
         * @param agg        the group's aggregation buffer
         * @param parameters the raw column values for this row
         * @throws HiveException if a value cannot be converted to a long
         */
        @Override
        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
            Object value = parameters[0];
            Object weight = parameters[1];
            if (value == null || weight == null) {
                return;
            }
            MyAggregationBuffer buffer = (MyAggregationBuffer) agg;
            try {
                long avg = PrimitiveObjectInspectorUtils.getLong(value, inputOI1);
                long count = PrimitiveObjectInspectorUtils.getLong(weight, inputOI2);
                buffer.count += count;
                buffer.sum += avg * count;
            } catch (NumberFormatException e) {
                // Bug fix: preserve the cause so the offending value is visible
                // in the stack trace (the original dropped it).
                throw new HiveException("NumberFormatException：get value failed", e);
            }
        }

        /**
         * Emits the partial aggregation result for the PARTIAL1/PARTIAL2 phases.
         *
         * @param agg the group's aggregation buffer
         * @return the {count, sum} struct matching the OI declared in init()
         */
        @Override
        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
            MyAggregationBuffer myAgg = (MyAggregationBuffer) agg;
            ((LongWritable) partialAggregationResult[0]).set(myAgg.count);
            ((LongWritable) partialAggregationResult[1]).set(myAgg.sum);
            // Bug fix: must return the Object[] that matches the struct OI
            // declared in init() for PARTIAL1/PARTIAL2; returning the raw
            // buffer (myAgg) cannot be serialized through that OI.
            return partialAggregationResult;
        }

        /**
         * Merges one partial result (the struct produced by terminatePartial)
         * into the group's buffer.
         *
         * @param agg     the accumulating buffer
         * @param partial a {count, sum} struct; ignored when NULL
         */
        @Override
        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
            if (partial == null) {
                return;
            }
            MyAggregationBuffer myAgg = (MyAggregationBuffer) agg;
            // Decode the struct members through the OIs captured in init().
            Object partialCount = soi.getStructFieldData(partial, countField);
            Object partialSum = soi.getStructFieldData(partial, sumField);
            // Bug fix: accumulate (+=) instead of overwrite (=) — with plain
            // assignment every merge discarded all previously merged partials,
            // so only the last partial result survived.
            myAgg.count += countFieldOI.get(partialCount);
            myAgg.sum += sumFieldOI.get(partialSum);
        }

        /**
         * Produces the final aggregate: floor(sum / count) as a long, or NULL
         * when the group contributed no (non-NULL) rows.
         *
         * @param agg the group's aggregation buffer
         * @return a {@link LongWritable} with the weighted average, or null
         */
        @Override
        public Object terminate(AggregationBuffer agg) throws HiveException {
            MyAggregationBuffer myAgg = (MyAggregationBuffer) agg;
            // An empty group (or all-NULL input) has no defined average.
            if (myAgg.count == 0) {
                return null;
            }
            fullAggregationResult.set(myAgg.sum / myAgg.count);
            return fullAggregationResult;
        }

    }

}
