package com.atguigu;

import com.google.gson.JsonArray;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.json.JSONArray;

import java.util.Collections;
import java.util.List;

/**
 * Hive UDTF that explodes a JSON-array string into one row per element.
 *
 * <p>Input: a single {@code string} column holding a JSON array
 * (e.g. {@code "[{\"a\":1},{\"a\":2}]"}).
 * Output: one column named {@code json} of type {@code string}, one row
 * per array element (each element serialized back to its JSON text).
 */
public class ExplodeJsonToObject extends GenericUDTF {

    /** Reusable one-column output row, avoids per-row allocation. */
    private final Object[] out = new Object[1];

    /**
     * Validates that exactly one string argument is supplied and declares
     * the single string output column {@code json}.
     *
     * @param argOIs inspector for the UDTF's argument struct
     * @return inspector describing one string column named "json"
     * @throws UDFArgumentException if the argument count or type is wrong
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {
        if (argOIs.getAllStructFieldRefs().size() != 1) {
            throw new UDFArgumentLengthException("传入数据长度不对");
        }
        String typeName = argOIs.getAllStructFieldRefs().get(0).getFieldObjectInspector().getTypeName();
        if (!typeName.equals("string")) {
            throw new UDFArgumentTypeException(0, "输入类型必须为string");
        }
        List<String> fieldNames = Collections.singletonList("json");
        List<ObjectInspector> fieldOIs =
                Collections.singletonList(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    /**
     * Parses the JSON array in {@code args[0]} and emits one row per element.
     *
     * @param args single-element array holding the JSON-array string
     * @throws HiveException propagated from {@link #forward(Object)}
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // NULL input produces no rows (standard UDTF convention).
        if (args[0] == null) {
            return;
        }
        JSONArray jsonArray = new JSONArray(args[0].toString());
        for (int i = 0; i < jsonArray.length(); i++) {
            // BUGFIX: was a recursive process(out) call, which never emitted a
            // row and recursed infinitely; forward() is how a UDTF emits rows.
            // toString() ensures the value matches the declared string OI
            // (get(i) would yield a JSONObject/JSONArray for nested elements).
            out[0] = jsonArray.get(i).toString();
            forward(out);
        }
    }

    /** No resources to release. */
    @Override
    public void close() throws HiveException {
    }
}
