package com.xzx.ecommercedw.hiveudf;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * Generic field extraction UDF for event_log records.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2024/9/24 17:35
 */
public class EventLogBaseFiledUDF extends UDF {

    /**
     * Extracts a single field from a raw event-log line of the form
     * {@code <server_time>|<json_payload>}.
     *
     * <p>Hive resolves old-style UDFs by this fixed method name and signature.
     *
     * @param line raw log record, e.g.
     *             1726791125692|{"ap":"app","cm":{"mid":"96",...},"et":[{...}]}
     * @param key  field to extract: {@code "st"} returns the server timestamp
     *             (the part before the pipe); {@code "et"} returns the event
     *             array as an unparsed JSON string; any other key is looked up
     *             inside the {@code "cm"} (common fields) JSON object
     * @return the requested value, or {@code ""} when the input is null,
     *         malformed, or the key is absent
     * @throws JSONException if the payload after the pipe is not valid JSON
     */
    public String evaluate(String line, String key) throws JSONException {

        // Fix: guard null inputs — Hive passes NULL columns as null Strings,
        // and the original code NPE'd on line.split().
        if (line == null || key == null) {
            return "";
        }

        // 1. Split into [server_time, json_payload] on the literal '|'
        String[] log = line.split("\\|");

        // 2. Validity check: exactly two parts and a non-blank JSON payload
        if (log.length != 2 || StringUtils.isBlank(log[1])) {
            return "";
        }

        // 3. Parse the JSON payload
        JSONObject baseJson = new JSONObject(log[1].trim());

        String result = "";

        // 4. Look up the value for the requested key
        if ("et".equals(key)) {
            if (baseJson.has("et")) {
                // Return the event array untouched as a raw JSON string
                result = baseJson.getString("et");
            }
        } else if ("st".equals(key)) {
            // Server timestamp is the segment before the pipe
            result = log[0].trim();
        } else {
            // All other keys live under the "cm" (common) object.
            // Fix: guard has("cm") — the original threw JSONException for
            // records missing "cm", instead of returning "" like every other
            // missing-field case.
            if (baseJson.has("cm")) {
                JSONObject cm = baseJson.getJSONObject("cm");
                if (cm.has(key)) {
                    result = cm.getString(key);
                }
            }
        }

        return result;
    }
}
