package com.we.hive.hook.sqlaudit;

import com.alibaba.fastjson.JSONObject;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.security.UserGroupInformation;

import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.Date;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Hive post-execution hook that records every executed query (application id,
 * timestamp, user, SQL text) as a JSON record in a per-day HDFS directory.
 *
 * <p>The write happens on a short-lived background daemon thread so the hook
 * never blocks query completion. Bare {@code USE <db>} statements are not
 * audited.
 */
public class SQLAuditHook implements ExecuteWithHookContext {

    // Single shared logger instead of a fresh LogFactory lookup per invocation.
    private static final Log LOG = LogFactory.getLog(SQLAuditHook.class);

    /** Root HDFS directory; one {@code yyyy-MM-dd} sub-directory is created per day. */
    private static final String AUDITLOGPATH = "/data/logs/hive-hook-sql-audit/";

    /** Record terminator: SOH control character followed by the platform line separator. */
    public static final String RECORD_SEPARATOR = "\u0001" + System.getProperty("line.separator");

    /**
     * Hook entry point called by Hive after query execution.
     *
     * @param hookContext context supplied by Hive; provides the query plan,
     *                    user identity and the Hadoop configuration
     */
    @Override
    public void run(HookContext hookContext) {
        final HookContext hc = hookContext;
        // SimpleDateFormat is not thread-safe; a fresh instance per invocation,
        // handed to exactly one worker thread, is safe.
        final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Thread worker = new Thread(new Runnable() {
            @Override
            public void run() {
                // The catch must live INSIDE the runnable: a try/catch around
                // Thread.start() can never observe exceptions thrown on the
                // worker thread (the original code's catch block was dead).
                try {
                    audit(hc, sdf);
                } catch (Exception e) {
                    LOG.error("SQL Audit Hook is exception: " + e.getMessage(), e);
                }
            }
        });
        worker.setName("sql-audit-hook");
        // Daemon thread so a slow/hung HDFS write cannot keep the JVM alive.
        worker.setDaemon(true);
        worker.start();
    }

    /**
     * Builds one audit record from the hook context and persists it to HDFS.
     *
     * @param hc  hook context for this query
     * @param sdf formatter for the human-readable query timestamp
     */
    private void audit(HookContext hc, SimpleDateFormat sdf) {
        LOG.info("SQL Audit Hook Class Name:  " + SQLAuditHook.class.getName());

        QueryPlan queryPlan = hc.getQueryPlan();
        long queryStartTime = queryPlan.getQueryStartTime();

        // Fall back to "now" when the plan carries no start time.
        long queryTime = (queryStartTime == 0L ? System.currentTimeMillis() : queryStartTime);
        String formatDate = sdf.format(new Date(queryTime));
        LOG.info("SQL Audit Hook Query Time:  " + formatDate);

        // Prefer the session user name; fall back to the UGI identity when blank.
        UserGroupInformation ugi = hc.getUgi();
        String userName = hc.getUserName();
        if (StringUtils.isBlank(userName)) {
            userName = ugi.getUserName();
        }
        LOG.info("SQL Audit Hook User Name:  " + userName);

        String querySQL = queryPlan.getQueryString();
        LOG.info("SQL Audit Hook Query SQL:  " + querySQL);

        // ThreadLocalRandom avoids allocating a new Random per query.
        int rand = ThreadLocalRandom.current().nextInt(65535) + 1;
        String applicationId = userName + "_" + queryTime + "_" + rand;

        // Skip bare "USE db" statements. Anchoring the check at the start of
        // the trimmed SQL fixes the original false positive where any query
        // merely CONTAINING "use " (e.g. "... from house_t ...") was dropped.
        String normalized = (querySQL == null) ? "" : querySQL.trim().toLowerCase();
        if (!normalized.startsWith("use ")) {
            JSONObject jsonObject = new JSONObject();
            jsonObject.put("applicationId", applicationId);
            jsonObject.put("date", formatDate);
            jsonObject.put("name", userName);
            jsonObject.put("sql", querySQL);
            writeHDFS(hc.getConf(), jsonObject);
        }
    }

    /**
     * Writes one audit record to {@code AUDITLOGPATH/<date>/<applicationId>.txt},
     * creating (and re-owning to "hive") the dated directory on first use.
     * Failures are logged with their stack trace and swallowed — auditing is
     * best-effort and must never fail the query.
     *
     * @param conf Hadoop configuration used to resolve the target FileSystem
     * @param data JSON audit record; must contain an "applicationId" entry
     */
    private void writeHDFS(Configuration conf, JSONObject data) {
        try {
            String date = LocalDate.now(ZoneId.of("Asia/Shanghai")).toString();
            String dirHDFS = AUDITLOGPATH + date;
            Path dirPath = new Path(dirHDFS);

            // FileSystem.get returns a cached, shared instance — do not close it.
            FileSystem fileSystem = FileSystem.get(conf);
            if (!fileSystem.exists(dirPath)) {
                fileSystem.mkdirs(dirPath);
                // Re-own freshly created directories to "hive" so later
                // (possibly impersonated) hook invocations can still write.
                if (!fileSystem.getFileStatus(dirPath).getOwner().equals("hive")) {
                    LOG.warn("##Warn logdir: " + dirHDFS + " Owner: " + fileSystem.getFileStatus(dirPath).getOwner());
                    fileSystem.setOwner(dirPath, "hive", "supergroup");
                }
            }

            String applicationId = data.getString("applicationId");
            Path filePath = new Path(dirHDFS + "/" + applicationId + ".txt");
            // Explicit UTF-8: bare getBytes() would use the platform charset
            // and corrupt multi-byte characters in the SQL text.
            byte[] writeData = (data + RECORD_SEPARATOR).getBytes(StandardCharsets.UTF_8);

            // try-with-resources closes the stream on every path; a single
            // hsync() flushes the record through to the datanodes.
            try (FSDataOutputStream out = fileSystem.create(filePath, true)) {
                out.write(writeData, 0, writeData.length);
                out.hsync();
            }
        } catch (Exception e) {
            // Log the full stack trace instead of printStackTrace() to stdout.
            LOG.error("writeHDFS Exception: " + e.getMessage(), e);
        }
    }
}
