package com.hive.udf.my_len;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

/**
 * Hive UDF {@code my_len(str)}: returns the character length of a STRING
 * argument as an INT, or NULL when the input is NULL.
 *
 * <p>Accepts exactly one argument of primitive STRING type; any other
 * arity or type is rejected at query-compile time in {@link #initialize}.
 */
public class MyLength extends GenericUDF {

    /**
     * Called once before any rows are processed; validates the argument
     * list and declares the return type.
     *
     * @param objectInspectors inspectors for the call-site arguments
     * @return inspector for the INT result
     * @throws UDFArgumentException if there is not exactly one argument,
     *         or the argument is not a primitive STRING
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] objectInspectors) throws UDFArgumentException {
        // Exactly one argument is required.
        if (objectInspectors.length != 1){
            throw new UDFArgumentException("my_len函数参数个数不正确");
        }

        ObjectInspector objectInspector = objectInspectors[0];
        // The argument must be a primitive category (not struct/map/list/union).
        if (objectInspector.getCategory()!=ObjectInspector.Category.PRIMITIVE){
            throw new UDFArgumentException("my_len函数参数类型不正确");
        }

        // Narrow to a primitive inspector so we can check the exact primitive type.
        PrimitiveObjectInspector argument = (PrimitiveObjectInspector) objectInspector;

        // Only STRING is supported.
        if (argument.getPrimitiveCategory()!= PrimitiveObjectInspector.PrimitiveCategory.STRING){
            throw new UDFArgumentException("my_len函数参数类型不正确");
        }

        // Result is a Java int (boxed Integer at runtime).
        return PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    }

    /**
     * Called once per input row.
     *
     * @param deferredObjects lazily-evaluated arguments; index 0 is the string
     * @return the string's length as an {@link Integer}, or {@code null}
     *         when the argument evaluates to SQL NULL
     * @throws HiveException if argument evaluation fails
     */
    @Override
    public Object evaluate(DeferredObject[] deferredObjects) throws HiveException {
        // Fetch and evaluate the single argument.
        DeferredObject deferredObject = deferredObjects[0];
        Object o = deferredObject.get();

        // SQL NULL in -> NULL out.
        if (o==null){
            return null;
        }
        // The inspector guarantees a string-typed value; length in UTF-16 code units.
        return o.toString().length();
    }

    /**
     * Human-readable form of this call for EXPLAIN output.
     * Fix: previously returned {@code null}, which renders as "null" (or can
     * NPE) in query plans; now returns e.g. {@code my_len(col)}.
     */
    @Override
    public String getDisplayString(String[] strings) {
        StringBuilder sb = new StringBuilder("my_len(");
        for (int i = 0; i < strings.length; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append(strings[i]);
        }
        return sb.append(')').toString();
    }
}
