package com.apex.spark.transform;

import com.alibaba.fastjson.JSONObject;
import com.apex.spark.SparkEnvironment;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import io.krakens.grok.api.Grok;
import io.krakens.grok.api.GrokCompiler;
import io.krakens.grok.api.Match;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Entry point for unstructured-data processing: parses each raw log row with a
 * configured Grok expression and converts the captured fields into a
 * JSON-backed {@link Dataset}.
 */
public class GrokFilter implements SparkTransform {
    // static so Spark closures that log do not drag the (non-serializable)
    // enclosing GrokFilter instance into closure serialization
    private static final Logger LOGGER = LoggerFactory.getLogger(GrokFilter.class);
    // plugin configuration, injected via setConfig before prepare/process run
    private Config config;
    // path to the user-defined grok-pattern file; in a distributed deployment
    // the same path must exist on every cluster node
    private String grokPatternPath;
    // resolved grok expression applied to every input row
    private String pattern;

    /**
     * Parses each input row with the configured grok pattern and returns the
     * successfully matched records as a JSON dataset.
     *
     * @param env  execution environment providing the SparkSession
     * @param data source rows; each row is treated as one raw log line
     * @return dataset read from the JSON strings of all rows that matched the
     *         pattern and contain no {@code "null"} key or value
     */
    @Override
    public Dataset<Row> process(SparkEnvironment env, Dataset<Row> data) {
        data.printSchema();

        // Capture the pattern in a local so the closures below serialize only a
        // String instead of the whole GrokFilter instance.
        final String grokPattern = pattern;

        JavaRDD<String> jsonRDD = data.toJavaRDD()
                .mapPartitions((FlatMapFunction<Iterator<Row>, String>) rows -> {
                    // Compile the grok expression once per PARTITION, not once per
                    // row: GrokCompiler construction, default-pattern registration
                    // (68 patterns) and compilation are expensive.
                    GrokCompiler grokCompiler = GrokCompiler.newInstance();
                    grokCompiler.registerDefaultPatterns();
                    Grok grok = grokCompiler.compile(grokPattern);

                    List<String> results = new ArrayList<>();
                    while (rows.hasNext()) {
                        String line = rows.next().mkString();
                        // Match the log line and collect captured fields as JSON.
                        Match grokMatch = grok.match(line);
                        Map<String, Object> resultMap = grokMatch.capture();
                        JSONObject jsonObject = new JSONObject();
                        for (Map.Entry<String, Object> entry : resultMap.entrySet()) {
                            jsonObject.put(entry.getKey(), entry.getValue());
                        }
                        // Record lines the pattern failed to match; the empty JSON
                        // object is filtered out downstream.
                        if (jsonObject.isEmpty()) {
                            LOGGER.error("grok error message : " + line + "\nerror pattern: " + grokPattern);
                        }
                        results.add(jsonObject.toJSONString());
                    }
                    return results.iterator();
                })
                .filter((Function<String, Boolean>) s -> {
                    if (StringUtils.isBlank(s)) {
                        return false;
                    }
                    JSONObject jsonObject = JSONObject.parseObject(s);
                    if (jsonObject.isEmpty()) {
                        return false;
                    }
                    // short-circuit || (original used bitwise |): drop records
                    // containing a literal "null" key or value
                    return !(jsonObject.containsValue("null") || jsonObject.containsKey("null"));
                });

        return env.getSparkSession()
                .read()
                .option("timestampFormat", "yyyy/MM/dd HH:mm:ss ZZ")
                .json(jsonRDD);
    }

    /**
     * Resolves the grok expression from configuration: registers the default
     * patterns plus the user-defined pattern file at {@link #grokPatternPath},
     * then expands the comma-separated pattern names of
     * {@code ConfigKeyName.GROK_PATTERN} into one space-separated expression.
     *
     * @param plugin execution environment (unused here)
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        if (config.hasPath(ConfigKeyName.GROK_PATTERN_PATH)) {
            grokPatternPath = config.getString(ConfigKeyName.GROK_PATTERN_PATH);
            pattern = config.getString(ConfigKeyName.GROK_PATTERN);
        }
        // try-with-resources closes the pattern file (the original leaked it)
        try (InputStream inputStream = new FileInputStream(grokPatternPath)) {
            GrokCompiler grokCompiler = GrokCompiler.newInstance();
            // register the 68 built-in default patterns
            grokCompiler.registerDefaultPatterns();
            // register the user-defined patterns read from the external file
            grokCompiler.register(inputStream);
            // all registered pattern name -> regex definitions
            Map<String, String> patternDefinitions = grokCompiler.getPatternDefinitions();
            // expand each comma-separated pattern name and join with single spaces
            String[] patternNames = pattern.split(",");
            StringBuilder builderPattern = new StringBuilder();
            for (int i = 0; i < patternNames.length; i++) {
                if (i > 0) {
                    builderPattern.append(' ');
                }
                builderPattern.append(patternDefinitions.get(patternNames[i]));
            }
            pattern = builderPattern.toString();
        } catch (Exception e) {
            // log with cause instead of printStackTrace so the failure reaches
            // the configured appenders
            LOGGER.error("failed to load grok patterns from " + grokPatternPath, e);
        }
    }

    /**
     * @return the plugin configuration
     */
    @Override
    public Config getConfig() {
        return config;
    }

    /**
     * @param config the plugin configuration to use
     */
    @Override
    public void setConfig(Config config) {
        this.config = config;
    }
}
