package com.shujia.hive;

import java.util.Locale;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

/**
 * Hive generic UDF that converts its single string argument to upper case.
 *
 * <p>Follows SQL semantics: a NULL input yields a NULL output.
 */
public class MyGenericUDFToUpper extends GenericUDF {

    /**
     * Validates the argument list — type checking is done here, once per query.
     *
     * @param arguments object inspectors for the call-site arguments
     * @return the inspector describing the return type (a Java String)
     * @throws UDFArgumentLengthException if the argument count is not exactly 1
     * @throws UDFArgumentTypeException   if the argument is not a primitive type
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // Validate the number of arguments passed to the UDF.
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("Input Args LengthError!!!");
        }

        // Validate the argument's category; enum constants are singletons,
        // so a reference comparison is safe and null-free here.
        if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(0, "Input Args TypeError!!!");
        }
        // Declare the UDF's return type.
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    }

    /**
     * Upper-cases the argument, once per row.
     *
     * <p>Fix: previously returned the literal string {@code "Error"} for NULL
     * input, which masked NULLs and broke {@code IS NULL} checks downstream;
     * standard Hive/SQL behavior is NULL in, NULL out.
     *
     * @param arguments the deferred row values (exactly one, per initialize)
     * @return the upper-cased string, or {@code null} if the input is NULL
     */
    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object value = arguments[0].get();
        if (value == null) {
            return null;
        }
        // Locale.ROOT gives locale-independent casing (avoids e.g. the
        // Turkish dotless-i problem on machines with tr_TR default locale).
        return value.toString().toUpperCase(Locale.ROOT);
    }

    /**
     * Rendered in EXPLAIN plans; previously returned an empty string, which
     * made the function invisible in query plans.
     */
    @Override
    public String getDisplayString(String[] children) {
        return "my_generic_udf_to_upper(" + String.join(", ", children) + ")";
    }
}
