package com.yzy.custom.udtf;


import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import java.util.ArrayList;

/**
 * Demo UDTF that explodes one delimited string column into rows of three
 * string columns: (id, name, cardId).
 *
 * <p>Input format (single string argument): records separated by {@code ,},
 * fields within a record separated by {@code #}. Fields may appear in any
 * order within a record and are classified by prefix:
 * {@code M...} = id, {@code S...} = cardId, anything else = name.
 * The M/S prefixes are stripped from the emitted values, e.g.
 * {@code M1001#xiaohu#S324231212} forwards the row (1001, xiaohu, 324231212).
 */
public class MyUDTFDemo1 extends GenericUDTF {

    /**
     * Declares the output schema: three string columns named id, name, cardId.
     *
     * @param argOIs inspectors for the arguments the UDTF is called with
     * @return a struct inspector describing the three output columns
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {
        // Column names of the generated rows.
        ArrayList<String> colNames = new ArrayList<>();
        // Matching type inspector for each column (all strings here).
        ArrayList<ObjectInspector> colTypes = new ArrayList<>();
        colNames.add("id");
        colTypes.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        colNames.add("name");
        colTypes.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        colNames.add("cardId");
        colTypes.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        // Bundle names and types into the output struct descriptor.
        return ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colTypes);
    }

    /**
     * Called once per input row; parses the first argument and forwards one
     * output row per comma-separated record.
     *
     * <p>Example input:
     * {@code M1001#xiaohu#S324231212,sp#M1002#S2543412432,S21312312412#M1003#dyj}
     * produces three rows, the first being (1001, xiaohu, 324231212).
     *
     * @param args column values for the current row; args[0] is the delimited string
     * @throws HiveException if forwarding a row fails
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // Defensive: nothing to emit for a missing/NULL input column.
        if (args == null || args.length == 0 || args[0] == null) {
            return;
        }
        String line = args[0].toString();
        if (line.isEmpty()) {
            return;
        }
        // One record per comma-separated segment.
        for (String record : line.split(",")) {
            String id = null;
            String name = null;
            String cardId = null;
            // Fields appear in arbitrary order; classify by prefix.
            for (String field : record.split("#")) {
                if (field.startsWith("M")) {
                    id = field.substring(1);      // strip the 'M' prefix
                } else if (field.startsWith("S")) {
                    cardId = field.substring(1);  // strip the 'S' prefix
                } else {
                    name = field;
                }
            }
            // Fresh array per row: Hive may hold a reference to the forwarded object.
            forward(new String[]{id, name, cardId});
        }
    }

    /** No resources to release for this UDTF. */
    @Override
    public void close() throws HiveException {
        // Intentionally empty: process() holds no state across rows.
    }
}
