package cn.lsh.hive.udaf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

/**
 * Aggregation processing modes (Mode):
 * 1. PARTIAL1 (phase 1: map):            init() --> iterate() --> terminatePartial()
 * 2. PARTIAL2 (phase 2: combine):        init() --> merge()   --> terminatePartial()
 * 3. FINAL    (final phase: reduce):     init() --> merge()   --> terminate()
 * 4. COMPLETE (map-only direct output):  init() --> iterate() --> terminate()
 * Note: init() is invoked at the start of every phase.
 */
public class ChaCountUDAF extends AbstractGenericUDAFResolver {

	@Override
	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
		// Selects the evaluator implementation based on the SQL argument type.
		if (parameters.length != 1) {
			throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
		}
		// Check the category BEFORE casting. The original cast-then-check order
		// threw ClassCastException (instead of the intended UDFArgumentTypeException)
		// for non-primitive arguments such as arrays, maps, or structs.
		ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
		if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
			throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
		}
		PrimitiveObjectInspector inputOI = (PrimitiveObjectInspector) oi;
		switch (inputOI.getPrimitiveCategory()) {
		case BYTE:
		case SHORT:
		case INT:
		case LONG:
		case FLOAT:
		case DOUBLE:
		case STRING:
			// All of the above are counted via their string representation.
			return new ChaCountUDAFEvaluator();
		case TIMESTAMP:
		case BOOLEAN:
		default:
			throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
		}
	}

	/**
	 * Evaluator that sums the character count of every input value.
	 *
	 * Method roles (init() runs at the start of every mode):
	 *   init():                    resolve input/output ObjectInspectors per mode
	 *   getNewAggregationBuffer(): allocate the per-group intermediate buffer
	 *   reset():                   clear a buffer so it can be reused
	 *   iterate():                 map side — consume one raw column value
	 *   terminatePartial():        emit the partial result after map/combine
	 *   merge():                   fold a partial result into the buffer
	 *   terminate():               reduce side — emit the final result
	 *
	 * All aggregation state lives in the AggregationBuffer (ChaSumAgg). The
	 * previous version also accumulated into an evaluator-level "total" field,
	 * which leaked state between groups when Hive reused the evaluator
	 * (e.g. with GROUP BY) and double-counted in terminatePartial().
	 */
	public static class ChaCountUDAFEvaluator extends GenericUDAFEvaluator {

		/** Input OI for PARTIAL1 and COMPLETE (raw column values). */
		PrimitiveObjectInspector inputOI;

		/** Output OI — a Java Integer in every mode. */
		ObjectInspector outputOI;

		/** Input OI for PARTIAL2 and FINAL (Integer partial sums). */
		PrimitiveObjectInspector integerOI;

		@Override
		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
			// Each mode receives exactly one input OI.
			assert parameters.length == 1;
			super.init(m, parameters);

			if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
				// Map side reads the raw SQL column value.
				inputOI = (PrimitiveObjectInspector) parameters[0];
			} else {
				// Combine/reduce side reads the Integer partial sums.
				integerOI = (PrimitiveObjectInspector) parameters[0];
			}
			// Every stage emits a Java Integer.
			outputOI = ObjectInspectorFactory.getReflectionObjectInspector(Integer.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
			return outputOI;
		}

		@Override
		public AggregationBuffer getNewAggregationBuffer() throws HiveException {
			return new ChaSumAgg();
		}

		@Override
		public void reset(AggregationBuffer agg) throws HiveException {
			((ChaSumAgg) agg).sum = 0;
		}

		@Override
		public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
			assert parameters.length == 1;
			// Add the character length of each non-null value to the buffer.
			if (parameters[0] != null) {
				ChaSumAgg sumAgg = (ChaSumAgg) agg;
				String value = PrimitiveObjectInspectorUtils.getString(parameters[0], inputOI);
				sumAgg.add(value.length());
			}
		}

		@Override
		public Object terminatePartial(AggregationBuffer agg) throws HiveException {
			// Return only this buffer's sum; do not accumulate into evaluator
			// state, which would bleed across groups on evaluator reuse.
			return ((ChaSumAgg) agg).sum;
		}

		@Override
		public void merge(AggregationBuffer agg, Object partial) throws HiveException {
			// Fold a partial sum from a mapper/combiner into this buffer.
			if (partial != null) {
				ChaSumAgg sumAgg = (ChaSumAgg) agg;
				int partialSum = PrimitiveObjectInspectorUtils.getInt(partial, integerOI);
				sumAgg.add(partialSum);
			}
		}

		@Override
		public Object terminate(AggregationBuffer agg) throws HiveException {
			// Final result is exactly the buffer's accumulated sum.
			return ((ChaSumAgg) agg).sum;
		}

		/**
		 * Intermediate buffer holding the running character count for one group.
		 * AggregationBuffer lets us keep arbitrary per-group state between calls.
		 */
		static class ChaSumAgg implements AggregationBuffer {
			// Running total of characters seen so far for this group.
			int sum = 0;

			void add(int num) {
				sum += num;
			}
		}
	}
}
