import java.util.List;
import com.google.common.collect.Lists;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

/*
 * @program: hive
 * @description: UDTF that explodes two comma-separated string columns into (key, value) rows
 * @author: by song
 * @create: 2019-08-12 09:19
 */

/**
 * Hive UDTF that takes one or two comma-separated strings and emits one
 * (key, value) row per element of the first string. The i-th element of the
 * second string (if present) becomes the value for the i-th key; missing
 * values are emitted as NULL.
 *
 * Example: process({"a,s,d", "1,2"}) forwards ("a","1"), ("s","2"), ("d",null).
 */
public class ArrToMapUDTF extends GenericUDTF
{
    // Reusable output row buffer: obj[0] = key column, obj[1] = value column.
    // Hive's forward() consumes the row before the next iteration overwrites it.
    private String[] obj = new String[2];

    /**
     * Declares the output schema of this UDTF: two string columns named
     * "key" and "value".
     *
     * @param argOIs inspectors for the call-site arguments (unused here; both
     *               arguments are read via toString() in process())
     * @return a struct inspector describing the ("key" string, "value" string) row
     * @throws UDFArgumentException never thrown by this implementation
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException
    {
        // Output column names, in row order.
        List<String> colName = Lists.newArrayList("key", "value");
        // Matching object inspectors: both columns are plain Java strings.
        List<ObjectInspector> resType = Lists.newArrayList(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        // Hive accesses each emitted row through this struct inspector.
        return ObjectInspectorFactory.getStandardStructObjectInspector(colName, resType);
    }

    /**
     * Splits the first argument on "," to produce keys and, when present,
     * the second argument on "," to produce values, forwarding one
     * (key, value) row per key. Keys without a matching value get NULL.
     *
     * @param args UDTF call arguments; args[0] is the key list (required),
     *             args[1] is the optional value list. A NULL first argument
     *             produces no rows.
     * @throws HiveException propagated from the Hive runtime
     */
    @Override
    public void process(Object[] args) throws HiveException
    {
        // No usable input: a missing or NULL first argument yields no rows.
        // The length check also protects single-argument call sites, which
        // previously threw ArrayIndexOutOfBoundsException below.
        if (args == null || args.length == 0 || args[0] == null)
        {
            return;
        }
        String[] arr1 = args[0].toString().split(",");
        String[] arr2 = null;

        // Second argument is optional; absent or NULL means every value is NULL.
        if (args.length > 1 && args[1] != null)
        {
            arr2 = args[1].toString().split(",");
        }
        for (int i = 0; i < arr1.length; i++)
        {
            obj[0] = arr1[i];
            // Pair positionally; keys beyond the value list get NULL.
            if (arr2 != null && arr2.length > i)
            {
                obj[1] = arr2[i];
            } else {
                obj[1] = null;
            }
            // Outside a real Hive session forward() fails (no output collector),
            // so fall back to printing the row — this keeps the main() below
            // usable for quick local debugging.
            try
            {
                forward(obj);
            }catch (Exception e)
            {
                System.out.println("***********本机调试***********");
                System.out.println(obj[0]+" "+obj[1]);
            }
        }
    }

    /** No per-query resources to release. */
    @Override
    public void close() throws HiveException { }

    /**
     * Local smoke test: runs process() outside Hive, where forward() fails and
     * the rows are printed to stdout instead.
     */
    public static void main(String[] args) throws HiveException
    {
        Object[] arg0 = new Object[2];
        arg0[0]="a,s,d";
        arg0[1]="1,2";
        ArrToMapUDTF arr=new ArrToMapUDTF();
        arr.process(arg0);
    }

}