package hive_function;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.json.JSONArray;
import org.json.JSONException;

// This class is executed by Hive (not called directly from our code), so it
// extends GenericUDTF and is registered/invoked through Hive's UDTF mechanism.
public class ExplodeEvent extends GenericUDTF {

    // Reusable output row buffer: forward() consumes the row before the next
    // iteration, so one shared array avoids a per-row allocation (common UDTF idiom).
    private final String[] result = new String[2];

    /**
     * Declares the UDTF's output schema: two string columns,
     * (event_json, event_name).
     *
     * <p>The original code delegated to {@code super.initialize(argOIs)}, whose
     * default implementation throws {@code UDFArgumentException} ("not
     * implemented") — a UDTF must build its own output StructObjectInspector,
     * otherwise the function fails at query compile time.
     *
     * @param argOIs inspector describing the call arguments; exactly one
     *               argument (the JSON-array string) is expected
     * @return inspector for the two emitted string columns
     * @throws UDFArgumentException if the argument count is not exactly one
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {
        List<? extends StructField> inputFields = argOIs.getAllStructFieldRefs();
        if (inputFields.size() != 1) {
            throw new UDFArgumentException(
                    "explode_event takes exactly one argument, got " + inputFields.size());
        }

        List<String> fieldNames = new ArrayList<>(2);
        List<ObjectInspector> fieldOIs = new ArrayList<>(2);
        fieldNames.add("event_json");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldNames.add("event_name");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    /**
     * Emits one row per element of the JSON-array argument.
     *
     * @param args single-element array; args[0] is a string that parses as a
     *             JSON array of event entries, each with an "en" (event name) field
     * @throws HiveException if forwarding a row fails
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // NULL column values must not NPE the task; emit nothing for them.
        if (args == null || args.length == 0 || args[0] == null) {
            return;
        }
        String etStr = args[0].toString();
        try {
            // A JSON-array-shaped string can be materialized directly.
            JSONArray jsonArray = new JSONArray(etStr);
            for (int i = 0; i < jsonArray.length(); i++) {
                // Column 0: the raw event entry; column 1: its "en" field.
                // NOTE(review): getString(i) vs getJSONObject(i) assumes the
                // element satisfies both accessors — confirm the upstream log format.
                result[0] = jsonArray.getString(i);
                result[1] = jsonArray.getJSONObject(i).getString("en");
                forward(result);
            }
        } catch (JSONException ignored) {
            // Malformed input must not kill the whole Hive job; skip this row.
            // If strict behavior is preferred, rethrow as HiveException instead.
        }
    }

    /** Nothing buffered and no resources held, so there is nothing to release. */
    @Override
    public void close() throws HiveException {
        // intentionally empty
    }
}
