/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.kingsoft.dc.khaos.plugin.core.utils;

import com.kingsoft.dc.khaos.plugin.core.constants.KhaosContants;
import com.ksyun.kbdp.dts.common.utils.Props;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.Logger;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


/**
 * Utilities for locating YARN application ids in a job's log file and killing
 * the corresponding applications on the cluster (Hadoop 2 only).
 */
public class HadoopJobUtils {
    // the regex to look for while looking for application id's in the hadoop log
    public static final Pattern APPLICATION_ID_PATTERN = Pattern
            .compile("^(application_\\d+_\\d+).*");


    /**
     * Kills every YARN application referenced in the given log file, optionally
     * authenticating via kerberos first (when proxying is enabled in the props).
     * All failures are caught and logged; this method never throws.
     *
     * @param logFilePath path to the job log to scan for application ids
     * @param mergedProps job properties; consulted for proxy/kerberos settings
     *                    and the resource configuration directory
     * @param log         logger used for progress and failure reporting
     */
    public static void proxyUserKillAllSpawnedHadoopJobs(final String logFilePath, Props mergedProps, final Logger log) {
        Properties prop = new Properties();
        prop.putAll(mergedProps.getFlattened());
        try {
            // parseBoolean handles null (-> false) without boxing a Boolean
            boolean shouldProxy = Boolean.parseBoolean(prop.getProperty(KhaosContants.ENABLE_PROXYING));
            String resourceConfDir = prop.getProperty(KhaosContants.RESOURCE_CONF_DIR);
            // NOTE(review): when resourceConfDir is null this builds new Path(""),
            // which Path rejects at construction; the resulting exception is
            // swallowed by the catch below. Kept as-is for compatibility — confirm intent.
            Path yarnResourcePath = new Path(resourceConfDir == null ? "" : resourceConfDir + "/yarn-site.xml");
            log.info("resourceConfDir:" + resourceConfDir);
            log.info("yarnResourcePath:" + yarnResourcePath.toString());
            if (shouldProxy) {
                // Authenticate as the proxy user before talking to YARN.
                String proxyUser = prop.getProperty(KhaosContants.PROXY_USER);
                String proxyKrb5Conf = prop.getProperty(KhaosContants.PROXY_KRB5_CONF);
                String proxyKeytabLocation = prop.getProperty(KhaosContants.PROXY_KEYTAB_LOCATION);
                SecurityUtils.hadoopKerberosAuth(proxyUser, proxyKrb5Conf, proxyKeytabLocation);
            }
            // Both the proxied and non-proxied paths end with the same kill call,
            // so it is done once here instead of duplicated in each branch.
            HadoopJobUtils.killAllSpawnedHadoopJobs(logFilePath, yarnResourcePath, log);
        } catch (Throwable t) {
            log.warn("something happened while trying to kill all spawned jobs", t);
        }
    }

    /**
     * Pass in a log file, this method will find all the hadoop jobs it has launched, and kills it
     * <p>
     * Only works with Hadoop2
     *
     * @param logFilePath  path to the job log to scan
     * @param resourcePath path to the yarn-site.xml used to configure the client
     * @param log          logger used for progress and failure reporting
     * @return a Set&lt;String&gt;. The set will contain the applicationIds that this job tried to kill.
     */
    public static Set<String> killAllSpawnedHadoopJobs(String logFilePath, Path resourcePath, Logger log) {
        Set<String> allSpawnedJobs = findApplicationIdFromLog(logFilePath, log);
        log.info("applicationIds to kill: " + allSpawnedJobs);

        // Best-effort: a failure to kill one application must not stop the rest.
        for (String appId : allSpawnedJobs) {
            try {
                killJobOnCluster(appId, resourcePath, log);
            } catch (Throwable t) {
                log.warn("something happened while trying to kill this job: " + appId, t);
            }
        }

        return allSpawnedJobs;
    }

    /**
     * <pre>
     * Takes in a log file, will grep every line to look for the application_id pattern.
     * If it finds multiple, it will return all of them, de-duped (this is possible in the case of pig jobs)
     * This can be used in conjunction with the @killJobOnCluster method in this file.
     * </pre>
     *
     * @param logFilePath path of the log file to scan; must exist, be a regular
     *                    file, and be readable
     * @param log         logger used to report read errors
     * @return a Set. May be empty, but will never be null
     * @throws IllegalArgumentException if the path does not denote a readable file
     */
    public static Set<String> findApplicationIdFromLog(String logFilePath, Logger log) {

        File logFile = new File(logFilePath);

        if (!logFile.exists()) {
            throw new IllegalArgumentException("the logFilePath does not exist: " + logFilePath);
        }
        if (!logFile.isFile()) {
            throw new IllegalArgumentException("the logFilePath specified  is not a valid file: "
                    + logFilePath);
        }
        if (!logFile.canRead()) {
            throw new IllegalArgumentException("unable to read the logFilePath specified: " + logFilePath);
        }

        Set<String> applicationIds = new HashSet<String>();

        // try-with-resources replaces the manual finally/close, which used to
        // swallow close() failures silently.
        try (BufferedReader br = new BufferedReader(new FileReader(logFile))) {
            String line;

            // finds all the application IDs
            while ((line = br.readLine()) != null) {
                // String.split never returns null, so no null check is needed
                for (String input : line.split("\\s")) {
                    Matcher m = APPLICATION_ID_PATTERN.matcher(input);
                    if (m.find()) {
                        applicationIds.add(m.group(1));
                    }
                }
            }
        } catch (IOException e) {
            log.error("Error while trying to find applicationId for log", e);
        }
        return applicationIds;
    }

    /**
     * <pre>
     * Uses YarnClient to kill the job on HDFS.
     * Using JobClient only works partially:
     *   If yarn container has started but spark job haven't, it will kill
     *   If spark job has started, the cancel will hang until the spark job is complete
     *   If the spark job is complete, it will return immediately, with a job not found on job tracker
     * </pre>
     *
     * @param applicationId the YARN application id (application_&lt;ts&gt;_&lt;seq&gt;) to kill
     * @param resourcePath  path to the yarn-site.xml used to configure the client
     * @param log           logger used for progress and failure reporting
     * @throws YarnException if YARN rejects the kill request
     * @throws IOException   if communication with the cluster fails
     */
    public static void killJobOnCluster(String applicationId, Path resourcePath, Logger log) throws YarnException, IOException {
        YarnClient yarnClient = null;
        try {
            YarnConfiguration yarnConf = new YarnConfiguration();
            yarnConf.addResource(resourcePath);
            yarnClient = YarnClient.createYarnClient();
            yarnClient.init(yarnConf);
            yarnClient.start();
            ApplicationId appId = ConverterUtils.toApplicationId(applicationId);
            log.info("start killing application: " + applicationId);
            yarnClient.killApplication(appId);
            log.info("successfully killed application: " + applicationId);
        } catch (YarnException | IOException e) {
            // Rethrow the original exception so the cause and stack trace are
            // preserved; the previous re-wrapping with e.getMessage() lost both,
            // and printStackTrace() bypassed the logger.
            log.warn("killing application: " + applicationId + " failed!", e);
            throw e;
        } finally {
            // Always release the client, even on failure.
            if (yarnClient != null) {
                yarnClient.stop();
            }
        }
    }

    /**
     * <pre>
     * constructions a javaOpts string based on the Props, and the key given, will return
     *  String.format("-D%s=%s", key, value);
     * </pre>
     *
     * @param props properties to look the key up in
     * @param key   property name to resolve
     * @return will return String.format("-D%s=%s", key, value). Throws RuntimeException if props not
     * present
     */
    public static String javaOptStringFromAzkabanProps(Props props, String key) {
        String value = props.get(key);
        if (value == null) {
            // The old message also formatted the value, which is always null on
            // this path and printed a useless "[null]".
            throw new RuntimeException(String.format("Cannot find property [%s] in azkaban props", key));
        }
        return String.format("-D%s=%s", key, value);
    }

    /**
     * Filter a single command so that it matches a whitelist regex and does not
     * match a blacklist regex.
     *
     * @param command        command to be checked
     * @param whitelistRegex whitelist regex to work as inclusion criteria
     * @param blacklistRegex blacklist regex to work as exclusion criteria
     * @param log            logger to report violation
     * @return the command itself when it passes both criteria, or {@code null}
     * when it is restricted. (The previous implementation called
     * {@code iterator().next()} on an empty list in the restricted case and
     * threw {@link NoSuchElementException}.)
     */
    public static String filterCommands(String command, String whitelistRegex,
                                        String blacklistRegex, Logger log) {
        Pattern whitelistPattern = Pattern.compile(whitelistRegex);
        Pattern blacklistPattern = Pattern.compile(blacklistRegex);
        if (whitelistPattern.matcher(command).matches()
                && !blacklistPattern.matcher(command).matches()) {
            return command;
        }
        log.warn(String.format("Removing restricted command: %s", command));
        return null;
    }

    /**
     * Resolves a comma-separated jar spec against a base directory: every entry
     * that is not already absolute (does not start with {@link File#separator})
     * is prefixed with {@code baseDir + File.separator}.
     *
     * @param baseDir           directory to resolve relative entries against
     * @param unresolvedJarSpec comma-separated list of jar paths; may be null or empty
     * @param log               unused; kept for interface compatibility
     * @return the comma-separated list with all entries absolute; "" for a
     * null/empty input
     */
    public static String resolvedJarWithAbsolutePathSpec(String baseDir, String unresolvedJarSpec,
                                                         Logger log) {

        if (unresolvedJarSpec == null || unresolvedJarSpec.isEmpty()) return "";

        String[] unresolvedJarSpecList = unresolvedJarSpec.split(",");
        List<String> resolved = new ArrayList<String>(unresolvedJarSpecList.length);
        for (String s : unresolvedJarSpecList) {
            if (!s.startsWith(File.separator)) {
                s = baseDir + File.separator + s;
            }
            resolved.add(s);
        }
        // String.join replaces the old append-then-trim-trailing-comma dance.
        return String.join(",", resolved);
    }

}
