package cn.getech.data.development.utils.hadoop;

import cn.getech.data.intelligence.common.utils.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Times;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * @description: 解析Hadoop日志
 * @author: wangzhaowen：kiss
 * @create: 2020/9/21 19:42
 * @version: 2020/9/21
 **/
/**
 * Utilities for fetching aggregated Hadoop/YARN application logs from HDFS and
 * formatting them as colour-coded HTML for display in a web UI.
 *
 * <p>Flow: {@link #getContaines(String, String)} lists the container/node pairs of an
 * application from the remote aggregated-log directory, {@link #dumpAContainersLogs}
 * dumps one container's log, and {@link #handleLog(String)} converts the raw text to HTML.
 *
 * <p>NOTE(review): this class keeps mutable static state ({@code defaultFS},
 * {@code logCLIHelpers}) and is therefore not thread-safe.
 */
public class HadoopLogUtils {
    // Re-initialised as a side effect of every getYarnConfiguration() call.
    private static LogCLIHelpers logCLIHelpers;
    private static final Logger LOGGER = LoggerFactory.getLogger(HadoopLogUtils.class);

    // Default HDFS namenode; override via setDefaultFS(...).
    private static String defaultFS = "hdfs://bigdata-test-2:8020";

    // Keyword lists used to classify a log line for colouring. Order matters in
    // handleLog(): error keywords are tried first, then warnings, then INFO.
    private static final String errAndWarnAndInfoStr = "ERROR,error,Error,Exception,exception,EXCEPTION,WARN,warn,Warn,Warning,INFO";
    private static final String errStr = "ERROR,error,Error,Exception,exception,EXCEPTION";
    private static final String warnStr = "WARN,warn,Warn,Warning";
    private static final String infoStr = "INFO";

    /**
     * Overrides the default filesystem; {@code defaultFS} is the bare authority
     * (e.g. {@code "namenode:8020"}) — the {@code hdfs://} scheme is prepended here.
     */
    public static void setDefaultFS(String defaultFS) {
        HadoopLogUtils.defaultFS = "hdfs://" + defaultFS;
    }

    /**
     * Fetches all container logs of an application and returns them as HTML.
     *
     * @param applicationId the YARN application id, e.g. {@code application_1600347239206_35664}
     * @param owner         the log owner — may be a tenant or a proxy user depending on
     *                      the deployment; set according to business rules (local tests
     *                      without dedicated hadoop users may simply pass {@code root})
     * @return the colour-coded HTML log, or {@code null} when the container list
     *         could not be resolved
     */
    public static String printLog(String applicationId, String owner) {
        // consoleUrl example: http://bigdata-test-4:8088/proxy/application_1600347239206_35664/
        // Resolve containerId -> nodeId for every container of the application.
        Map<String, String> map;
        try {
            map = HadoopLogUtils.getContaines(applicationId, owner);
        } catch (IOException e) {
            LOGGER.warn("获取日志失败");
            return null;
        }
        // Dump each container's log into a temp file, then read it back and concatenate.
        AtomicInteger count = new AtomicInteger(0);
        StringBuilder log = new StringBuilder();
        String temp = FileUtils.getTempPath() + "/";
        map.forEach((containeId, nodeId) -> {
            File file = new File(temp + containeId + "_" + count.get() + ".txt");
            // try-with-resources: the original leaked both output streams. FileOutputStream
            // creates the file, so the explicit createNewFile() call was redundant.
            // To write to the console instead of a file use PrintStream printStream = System.out.
            try (OutputStream fileOut = new FileOutputStream(file);
                 PrintStream printStream = new PrintStream(fileOut)) {
                // Hadoop 2.x log types are stdout/stderr; 3.x has stdout/gc.current.
                try {
                    HadoopLogUtils.dumpAContainersLogs(applicationId,
                            containeId,
                            nodeId,
                            owner, printStream);
                    // Flush before reading the file back: PrintStream buffers internally,
                    // so without this the tail of the log could be missing.
                    printStream.flush();
                    try (InputStream logIn = new FileInputStream(file)) {
                        log.append(IOUtils.toString(logIn, StandardCharsets.UTF_8));
                    }
                } catch (IOException e) {
                    LOGGER.warn("错误1:{}", e.getMessage());
                }
                count.getAndIncrement();
            } catch (IOException e) {
                LOGGER.warn("目录:{}", temp);
                LOGGER.warn("错误2:{}", e.getMessage());
            }
        });
        return handleLog(log.toString());
    }

    /**
     * Converts raw log text to HTML: line breaks become {@code <br/>}, tabs become
     * non-breaking spaces, and each line is colour-coded by severity — red/bold for
     * errors, orange for warnings, and a green timestamp prefix for INFO lines.
     *
     * @param log raw log text; returned unchanged when empty
     * @return the HTML-formatted log
     */
    public static String handleLog(String log) {
        if (log.isEmpty()) {
            return log;
        }
        // Normalise line breaks and tabs into their HTML equivalents.
        log = log.replaceAll("\r\n", "<br/>")
                .replaceAll("\t", "&nbsp;&nbsp;&nbsp;&nbsp;")
                .replaceAll("\n", "<br/>");

        String[] logArr = log.split("<br/>");
        for (int i = 0; i < logArr.length; i++) {
            String logStr = logArr[i];
            if (logStr.length() < 4) {
                continue;
            }
            // Colour by the FIRST matching keyword only. The original iterated every
            // keyword and could wrap one line in several nested (visually identical) spans.
            for (String str : errAndWarnAndInfoStr.split(",")) {
                if (!logStr.contains(str)) {
                    continue;
                }
                if (infoStr.contains(str)) {
                    // Highlight the leading timestamp of INFO lines in green.
                    // limit=2 keeps everything after a second "INFO" occurrence — the
                    // original split dropped that text.
                    String[] infos = logStr.split("INFO", 2);
                    if (infos.length > 1 && isTime(infos[0])) {
                        logStr = "<span style=\"color:#09A120;\">" + infos[0] + "</span> INFO " + infos[1];
                    }
                } else if (warnStr.contains(str)) {
                    logStr = "<span style=\"color:#FAAE14;\">" + logStr + "</span>";
                } else if (errStr.contains(str)) {
                    logStr = "<span style=\"color:#F14500;font-weight:600;\">" + logStr + "</span>";
                }
                logArr[i] = logStr;
                break;
            }
        }

        return StringUtils.join(logArr, "<br/>");
    }

    /**
     * Returns whether {@code str} starts with a parseable {@code yyyy-MM-dd HH:mm:ss}
     * timestamp. Intentionally keeps lenient SimpleDateFormat prefix-parsing: log
     * timestamps usually carry trailing millis/commas that a strict full-string
     * parser would reject.
     */
    public static boolean isTime(String str) {
        try {
            // SimpleDateFormat is not thread-safe, so a fresh instance is created per call.
            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            format.parse(str);
            return true;
        } catch (ParseException e) {
            return false;
        }
    }

    /**
     * Builds a fresh YARN configuration pointing at {@link #defaultFS}.
     *
     * <p>Side effect: re-initialises the static {@link #logCLIHelpers} with this
     * configuration (preserved from the original implementation).
     */
    public static Configuration getYarnConfiguration() {
        Configuration yarnConfiguration = new YarnConfiguration();
        yarnConfiguration.set("dfs.client.use.datanode.hostname", "true");
        yarnConfiguration.set("dfs.socket.timeout", "6000000");
        yarnConfiguration.set("dfs.permissions", "false");
        yarnConfiguration.set("fs.defaultFS", defaultFS);
        yarnConfiguration.setClass("fs.file.impl", LocalFileSystem.class, FileSystem.class);
        logCLIHelpers = new LogCLIHelpers();
        logCLIHelpers.setConf(yarnConfiguration);
        return yarnConfiguration;
    }

    /**
     * Dumps the aggregated log of one container to {@code out}.
     *
     * @param appId       YARN application id
     * @param containerId container id whose log should be dumped
     * @param nodeId      node the container ran on (used to select the log file)
     * @param jobOwner    owner of the aggregated logs
     * @param out         destination stream (caller closes it)
     * @return 0 when logs were found, -1 otherwise
     * @throws IOException on HDFS access errors
     */
    public static int dumpAContainersLogs(String appId, String containerId, String nodeId, String jobOwner, PrintStream out) throws IOException {
        // Build the configuration once; the original rebuilt it on every use (five times).
        Configuration conf = getYarnConfiguration();
        Path remoteRootLogDir = new Path(conf.get("yarn.nodemanager.remote-app-log-dir", "/tmp/logs"));

        String suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(conf);
        Path remoteAppLogDir = LogAggregationUtils.getRemoteAppLogDir(remoteRootLogDir, ConverterUtils.toApplicationId(appId), jobOwner, suffix);

        RemoteIterator<FileStatus> nodeFiles;
        try {
            Path qualifiedLogDir = FileContext.getFileContext(conf).makeQualified(remoteAppLogDir);
            nodeFiles = FileContext.getFileContext(qualifiedLogDir.toUri(), conf).listStatus(remoteAppLogDir);
        } catch (FileNotFoundException e) {
            // Log via SLF4J instead of printStackTrace().
            LOGGER.warn("remote app log dir listing failed: {}", e.getMessage());
            logDirNotExist(remoteAppLogDir.toString());
            return -1;
        }

        boolean foundContainerLogs = false;

        while (nodeFiles.hasNext()) {
            FileStatus thisNodeFile = nodeFiles.next();
            String fileName = thisNodeFile.getPath().getName();
            // Only consider completed (non-.tmp) log files written by the requested node.
            if (fileName.contains(LogAggregationUtils.getNodeString(nodeId)) && !fileName.endsWith(".tmp")) {
                AggregatedLogFormat.LogReader reader = null;

                try {
                    reader = new AggregatedLogFormat.LogReader(conf, thisNodeFile.getPath());
                    if (dumpAContainerLogs(containerId, reader, out, thisNodeFile.getModificationTime()) > -1) {
                        foundContainerLogs = true;
                    }
                } finally {
                    if (reader != null) {
                        reader.close();
                    }
                }
            }
        }

        if (!foundContainerLogs) {
            containerLogNotFound(containerId);
            return -1;
        } else {
            return 0;
        }
    }

    private static void logDirNotExist(String remoteAppLogDir) {
        LOGGER.info(remoteAppLogDir + " does not exist.");
        LOGGER.info("Log aggregation has not completed or is not enabled.");
    }

    private static void containerLogNotFound(String containerId) {
        LOGGER.info("Logs for container " + containerId + " are not present in this log-file.");
    }

    /**
     * Scans {@code reader} for the entry of {@code containerIdStr} and writes every
     * log type it contains to {@code out}.
     *
     * @return 0 when the container's logs were found and dumped, -1 otherwise
     * @throws IOException on read errors other than the expected end-of-entry EOF
     */
    public static int dumpAContainerLogs(String containerIdStr, AggregatedLogFormat.LogReader reader, PrintStream out, long logUploadedTime) throws IOException {
        AggregatedLogFormat.LogKey key = new AggregatedLogFormat.LogKey();

        // Advance the reader until the key matches the requested container (or EOF).
        DataInputStream valueStream;
        for (valueStream = reader.next(key); valueStream != null && !key.toString().equals(containerIdStr); valueStream = reader.next(key)) {
            key = new AggregatedLogFormat.LogKey();
        }

        if (valueStream == null) {
            return -1;
        } else {
            boolean foundContainerLogs = false;

            // The aggregated format signals "no more log types" via EOFException,
            // so the loop is intentionally terminated by the catch below.
            while (true) {
                try {
                    readContainerLogs(valueStream, out, logUploadedTime);
                    foundContainerLogs = true;
                } catch (EOFException eof) {
                    if (foundContainerLogs) {
                        return 0;
                    }

                    return -1;
                }
            }
        }
    }

    /**
     * Lists the containers of an application from its remote aggregated-log directory.
     *
     * @param appId    YARN application id
     * @param appOwner owner of the aggregated logs
     * @return map of containerId to nodeId (insertion-ordered); empty when the log
     *         directory is missing or holds no logs
     * @throws IOException on HDFS read errors while iterating the log files
     */
    public static Map<String, String> getContaines(String appId, String appOwner) throws IOException {
        Configuration yarnConfiguration = getYarnConfiguration();
        Path remoteRootLogDir = new Path(yarnConfiguration.get(
                YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
                YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
        String logDirSuffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(yarnConfiguration);
        // TODO Change this to get a list of files from the LAS.
        Path remoteAppLogDir = LogAggregationUtils.getRemoteAppLogDir(
                remoteRootLogDir, ConverterUtils.toApplicationId(appId), appOwner, logDirSuffix);
        RemoteIterator<FileStatus> nodeFiles;
        // LinkedHashMap keeps the containers in discovery order.
        Map<String, String> containerAndNodeId = new LinkedHashMap<>();
        try {
            Path qualifiedLogDir =
                    FileContext.getFileContext(yarnConfiguration).makeQualified(remoteAppLogDir);
            nodeFiles = FileContext.getFileContext(qualifiedLogDir.toUri(),
                    yarnConfiguration).listStatus(remoteAppLogDir);
        } catch (Exception fnf) {
            logDirNotExist(remoteAppLogDir.toString());
            return Collections.emptyMap();
        }
        boolean foundAnyLogs = false;
        while (nodeFiles.hasNext()) {
            FileStatus thisNodeFile = nodeFiles.next();
            if (!thisNodeFile.getPath().getName()
                    .endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
                AggregatedLogFormat.LogReader reader =
                        new AggregatedLogFormat.LogReader(yarnConfiguration, thisNodeFile.getPath());
                try {
                    AggregatedLogFormat.LogKey key = new AggregatedLogFormat.LogKey();
                    DataInputStream valueStream = reader.next(key);

                    while (valueStream != null) {
                        // Key example: container_1587284642166_0001_01_000003; the file
                        // name encodes the node as host_port, rewritten here to host:port.
                        containerAndNodeId.put(key.toString(), thisNodeFile.getPath().getName().replace("_", ":"));

                        foundAnyLogs = true;
                        // Next container
                        key = new AggregatedLogFormat.LogKey();
                        valueStream = reader.next(key);
                    }
                } finally {
                    reader.close();
                }
            }
        }
        if (!foundAnyLogs) {
            emptyLogDir(remoteAppLogDir.toString());
            return Collections.emptyMap();
        }
        return containerAndNodeId;
    }

    private static void emptyLogDir(String remoteAppLogDir) {
        LOGGER.info(remoteAppLogDir + " does not have any log files.");
    }

    /**
     * Reads one log type (header + {@code fileLength} bytes of content) from the
     * aggregated-log value stream and writes it to {@code out}. Throws
     * {@link EOFException} when there are no more log types in the entry — the
     * caller relies on that to terminate its loop.
     */
    private static void readContainerLogs(DataInputStream valueStream,
                                          PrintStream out, long logUploadedTime) throws IOException {
        byte[] buf = new byte[65535];
        String fileType = valueStream.readUTF();
        String fileLengthStr = valueStream.readUTF();
        long fileLength = Long.parseLong(fileLengthStr);
        out.print("LogType:");
        out.println(fileType);
        if (logUploadedTime != -1) {
            out.print("Log Upload Time:");
            out.println(Times.format(logUploadedTime));
        }
        out.print("LogLength:");
        out.println(fileLengthStr);
        out.println("Log Contents:");

        // Copy exactly fileLength bytes — the stream continues with the next log type,
        // so reads must never overshoot the declared length.
        long curRead = 0;
        long pendingRead = fileLength - curRead;
        int toRead = (int) Math.min(pendingRead, buf.length);
        int len = valueStream.read(buf, 0, toRead);
        while (len != -1 && curRead < fileLength) {
            out.write(buf, 0, len);
            curRead += len;

            pendingRead = fileLength - curRead;
            toRead = (int) Math.min(pendingRead, buf.length);
            len = valueStream.read(buf, 0, toRead);
        }
        out.println("End of LogType:" + fileType);
        out.println("");
    }

}
