package com.atbeijing.udf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import java.util.Arrays;

/**
 * Hive UDF: replaces the first character of a string with "X"
 * (e.g. "abcdef" -> "Xbcdef"); empty string stays empty, NULL stays NULL.
 */
public class StringBeginUDF extends GenericUDF {


    /**
     * 检查输入参数的类型和个数
     * @param arguments 输入参数的类型信息
     * @return 函数输出结果的类型
     * @throws UDFArgumentException
     */
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        //1. 限制参数个数为1
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("参数个数只能为1个！");
        }

        //2. 最好还要限制参数类型为string
        if (!arguments[0].getTypeName().equals("string")) {
            throw new UDFArgumentTypeException(0, "参数类型只能为String！");
        }

        //3. 返回自定义函的返回值类型StringInspector
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    }

    /**
     * 逻辑实现方法
     * @param arguments 输入参数
     * @return 输出结果
     * @throws HiveException
     */
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        DeferredObject argument = arguments[0];
        //懒加载,这里要先初始化对象
        Object o = argument.get();
        String input = o.toString();
        if (input == null)
            return null;
        else if (input.length() == 0)
            return "";
        else
            return "X" + input.substring(1);
    }

    public String getDisplayString(String[] children) {
        return Arrays.toString(children);
    }
}
