package com.wtw.udtf;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.List;

/**
 * Hive UDTF that explodes a JSON array of events (the "et" field produced
 * upstream by BaseFieldUDF.evaluate) into one row per event, with two
 * string columns: event_name (the event's "en" field) and event_json
 * (the complete JSON text of that single event).
 */
public class EventJsonUDTF extends GenericUDTF {

    /**
     * Declares the UDTF's output schema: two string columns,
     * event_name and event_json.
     *
     * @param argOIs object inspectors for the call-site arguments (unused here)
     * @return a struct inspector describing (event_name: string, event_json: string)
     * @throws UDFArgumentException never thrown directly by this implementation
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs) throws UDFArgumentException {

        // Output column names and their matching types, in positional order.
        List<String> fieldName = new ArrayList<>();
        List<ObjectInspector> fieldType = new ArrayList<>();

        fieldName.add("event_name");
        fieldName.add("event_json");

        // Both output columns are plain Java strings.
        fieldType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldType.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);

        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldName, fieldType);
    }

    /**
     * Processes one input row. The single argument is expected to hold the
     * JSON-array text of events, e.g.:
     *
     * <pre>
     * [{"ett":"1583726726627","en":"display","kv":{"goodsid":"0","action":"2",
     *   "extend1":"2","place":"0","category":"86"}},
     *  {"ett":"1583822064016","en":"active_background","kv":{"active_source":"3"}}]
     * </pre>
     *
     * For each element, forwards (en, full-element-json). Rows that are NULL,
     * blank, or not parseable as a JSON array are skipped silently; individual
     * malformed elements are likewise skipped.
     *
     * @param objects call-site arguments; objects[0] is the JSON-array text
     * @throws HiveException propagated from {@link #forward(Object)}
     */
    @Override
    public void process(Object[] objects) throws HiveException {

        // Fix: guard against a NULL input column — the original would NPE
        // on objects[0].toString() when Hive passes a null argument.
        if (objects == null || objects.length == 0 || objects[0] == null) {
            return;
        }
        String input = objects[0].toString();

        // Skip empty / whitespace-only input.
        if (StringUtils.isBlank(input)) {
            return;
        }

        // Fix: parse the array defensively. The original only caught
        // JSONException per element, so a row whose whole payload was
        // malformed JSON would fail the task instead of being skipped.
        JSONArray jsonArray;
        try {
            jsonArray = new JSONArray(input);
        } catch (JSONException e) {
            // Best-effort semantics: drop unparseable rows, keep the job alive.
            return;
        }

        // Emit one (event_name, event_json) row per array element.
        for (int i = 0; i < jsonArray.length(); i++) {
            String[] res = new String[2];
            try {
                JSONObject jsonObject = jsonArray.getJSONObject(i);
                // Event name comes from the element's "en" field.
                res[0] = jsonObject.getString("en");
                // The second column is the element's full JSON text.
                res[1] = jsonArray.getString(i);
            } catch (JSONException e) {
                // Skip elements missing "en" or otherwise malformed.
                continue;
            }

            forward(res);
        }
    }

    /** No per-task resources to release. */
    @Override
    public void close() throws HiveException {

    }
}
