package com.bigdata.udf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.UUID;

/**
 * 一进一出：进来一个字符串，出去一个拼接UUID的字符串
 */
public class MyUDF extends GenericUDF {

    // Quick local sanity check of UUID generation; not used by Hive.
    public static void main(String[] args) {
        System.out.println(UUID.randomUUID());
    }

    /**
     * Validates the call-site arguments: exactly one argument of a primitive
     * category is required.
     *
     * @param arguments object inspectors describing the arguments at the call site
     * @return a Java String object inspector — the UDF always returns a string
     * @throws UDFArgumentLengthException if the argument count is not exactly 1
     * @throws UDFArgumentTypeException   if the single argument is not primitive
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // Exactly one argument is accepted.
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("输入的参数num有误！！");
        }
        ObjectInspector argument = arguments[0];
        // Argument positions are zero-based: the first (and only) argument is
        // index 0, not 1 as the original code reported. Enum constants are
        // singletons, so identity comparison is safe and null-proof.
        if (argument.getCategory() != ObjectInspector.Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(0, "数据类型不匹配！！");
        }
        // Type matched — declare a string return type.
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    }

    /**
     * Appends a random UUID to the input string.
     *
     * <p>Returns SQL NULL when the input is NULL, per standard UDF convention.
     * The original code called {@code arguments[0].get().toString()} directly,
     * which threw a NullPointerException for NULL column values.</p>
     *
     * @param arguments deferred argument values; exactly one is expected
     * @return the input string concatenated with a random UUID, or null for NULL input
     */
    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object value = arguments[0].get();
        if (value == null) {
            return null; // propagate SQL NULL instead of throwing NPE
        }
        return value.toString() + UUID.randomUUID();
    }

    // Must not return null: Hive dereferences this when rendering EXPLAIN output.
    @Override
    public String getDisplayString(String[] children) {
        return "这是我的第一个自定义函数";
    }
}
