package com.shujia;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.objectinspector.*;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

/**
 * A custom "explode" UDTF: splits a delimited string into one row per token.
 *
 * <pre>
 *     select myexplode('hello,hello,hello', ',') =>
 *     words
 *     hello
 *     hello
 *     hello
 *
 *     create temporary function myexplode as 'com.shujia.MyUDTF';
 *
 *     select
 *     name
 *     ,view.types
 *     from movies lateral view myexplode(type,'/') view as types
 * </pre>
 */
public class MyUDTF extends GenericUDTF {

    /**
     * Validates the argument count/types and declares the output schema.
     *
     * @param argOIs struct describing the call-site arguments
     * @return inspector for a single string column named {@code words}
     * @throws UDFArgumentException if not exactly two primitive arguments are passed
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {
        List<? extends StructField> allStructFieldRefs = argOIs.getAllStructFieldRefs();
        if (allStructFieldRefs.size() != 2) {
            throw new UDFArgumentLengthException("该函数要求传入两个参数，一个是带有分隔符的字符串 另一个参数是分隔符");
        }

        // Each StructField describes one argument; both must be primitive
        // (string-like) values. Use the argument position as the error index —
        // UDFArgumentTypeException expects the 0-based argument number.
        for (int i = 0; i < allStructFieldRefs.size(); i++) {
            ObjectInspector fieldObjectInspector = allStructFieldRefs.get(i).getFieldObjectInspector();
            if (fieldObjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE) {
                throw new UDFArgumentTypeException(i, "该函数要求传入两个字符串类型");
            }
        }

        // Output schema: a single column named "words" of Java String type.
        ArrayList<String> fieldNames = new ArrayList<String>();
        ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
        fieldNames.add("words");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    /**
     * Processes one input row: splits the first argument by the second and
     * forwards one output row per token. A UDTF has no return value, so each
     * produced row is emitted via {@link #forward(Object)}.
     *
     * @param args the call-site argument values for the current row
     * @throws HiveException on emission failure
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // NULL in either argument: emit nothing for this row instead of NPE-ing.
        if (args[0] == null || args[1] == null) {
            return;
        }

        // Depending on the input ObjectInspector, Hive may hand us Text,
        // LazyString or String here — toString() handles all of them, whereas
        // a cast to Text would throw ClassCastException for the others.
        String str = args[0].toString();
        String splitStr = args[1].toString();

        // String.split() interprets its argument as a regex; quote it so
        // delimiters like "." or "|" are treated literally.
        String[] split = str.split(Pattern.quote(splitStr));

        // Reused one-column buffer; forward() emits one row per call.
        String[] outOneLine = new String[1];
        for (String s : split) {
            outOneLine[0] = s;
            forward(outOneLine);
        }
    }

    /** Nothing to clean up — all state is per-row. */
    @Override
    public void close() throws HiveException {
    }
}
