package cn.xiaolang.common.utils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author fuhw/Dean
 * @date 2018-09-28
 */
public final class ShellUtils {

    private static final Logger log = LoggerFactory.getLogger(ShellUtils.class);

    /** Utility class; not instantiable. */
    private ShellUtils() {
    }

    public static void main(String[] args) {
        exeCmd("ls -l");
    }

    /**
     * Fetches the executor script/jar from HDFS and runs {@code exeJar.sh} with the
     * given SQL and connection parameters.
     *
     * @param sql      SQL script to execute; must be non-null and non-empty
     * @param labelId  label id forwarded to the script; must be non-null
     * @param host     database host
     * @param port     database port
     * @param username database user
     * @param password database password
     * @throws IllegalArgumentException if {@code sql} is null/empty or {@code labelId} is null
     */
    public static void execSqlByJar(String sql, Long labelId, String host, int port, String username, String password) {
        log.info("exec-sql:{}", sql);
        if (sql == null || sql.isEmpty() || labelId == null) {
            // IllegalArgumentException is a RuntimeException, so existing callers that
            // catch RuntimeException keep working.
            throw new IllegalArgumentException("the params must be not null");
        }
        exeCmd("rm -rf ./exeJar.sh");
        exeCmd("rm -rf ./tbs_delimit_sqlexecutor-1.0.1.jar");
        exeCmd("hdfs dfs -get /user/hongwei/sqlMapping/exeJar.sh");
        exeCmd("hdfs dfs -get /user/hongwei/sqlMapping/tbs_delimit_sqlexecutor-1.0.1.jar");
        exeCmd("chmod +x exeJar.sh");
        // Pass each value as its own argv element. No shell is involved, so whitespace
        // or quote characters inside the SQL can neither split arguments nor inject
        // extra commands. (The previous string concatenation was also missing the
        // spaces between the quoted arguments, fusing them into a single token, and
        // Arrays.toString(cmd) produced "[/bin/sh, -c, ...]" which Runtime.exec
        // tokenized into a nonsense command line.)
        exeCmd("./exeJar.sh", sql, String.valueOf(labelId), host, String.valueOf(port), username, password);
    }

    /**
     * Drains the given reader to exhaustion.
     *
     * @param input reader to consume (caller closes it)
     * @return all lines, each terminated with '\n'; empty string if the stream is empty
     * @throws IOException if reading fails
     */
    private static String readBufferedStream(BufferedReader input) throws IOException {
        StringBuilder builder = new StringBuilder();
        String line;
        while ((line = input.readLine()) != null) {
            builder.append(line).append("\n");
        }
        return builder.toString();
    }

    /**
     * <ul>
     * spark-sql --master yarn --name HiveSqlExecutorShell --queue etl-hdp
     * --executor-java_memory 4G --total-executor-cores 8 -e "$1"
     * </ul>
     * <p>
     * example:
     * </p>
     * <ul>
     * spark-sql --master yarn --name HiveSqlExecutorShell --queue etl-hdp
     * --executor-java_memory 4G --total-executor-cores 8 -e "use liyou_test_db;show
     * tables;"
     * </ul>
     *
     * @param sql SQL script
     */
    public static void execSqlBySpark(String sql) {
        log.info("exec-sql:{}", sql);
        if (sql == null || sql.isEmpty()) {
            return;
        }
        exeCmd("rm -rf ./exeSql.sh");
        exeCmd("hdfs dfs -get /user/hongwei/sqlMapping/exeSql.sh");
        exeCmd("chmod +x exeSql.sh");
        // Hand the SQL to the script as a single argv element instead of wrapping it
        // in "sh -c" with hand-rolled quoting; this fixes the former
        // exeCmd(Arrays.toString(cmd)) call, which never ran the intended command.
        exeCmd("./exeSql.sh", sql);
    }

    /**
     * Executes a whitespace-separated command line.
     * <p>
     * Kept for backward compatibility; tokenizes on whitespace exactly like the
     * {@code Runtime.exec(String)} call it used to make, then delegates to
     * {@link #exeCmd(String...)}. Do not use this variant for arguments that may
     * contain spaces — pass them individually to the varargs overload instead.
     *
     * @param command shell script command; null/empty is a silent no-op
     */
    public static void exeCmd(String command) {
        if (command == null || command.isEmpty()) {
            return;
        }
        exeCmd(command.trim().split("\\s+"));
    }

    /**
     * Executes a command given as explicit argv elements via {@link ProcessBuilder},
     * logging stdout on success and stderr when the process produced any error output.
     *
     * @param command program and its arguments, one array element each;
     *                null/empty is a silent no-op
     */
    public static void exeCmd(String... command) {
        if (command == null || command.length == 0) {
            return;
        }
        String display = String.join(" ", command);
        Process pro = null;
        try {
            pro = new ProcessBuilder(command).start();
            String result;
            String errorInfo;
            // Drain BOTH streams before waitFor(): the old code waited first, which
            // deadlocks once the child fills a pipe buffer. UTF-8 is fixed explicitly
            // so decoding does not depend on the platform default charset.
            // NOTE(review): the streams are drained sequentially; a process writing
            // more than a pipe buffer of stderr while stdout is being read could
            // still block — move one drain to a separate thread if that ever occurs.
            try (BufferedReader normal = new BufferedReader(
                         new InputStreamReader(pro.getInputStream(), StandardCharsets.UTF_8));
                 BufferedReader error = new BufferedReader(
                         new InputStreamReader(pro.getErrorStream(), StandardCharsets.UTF_8))) {
                result = readBufferedStream(normal);
                errorInfo = readBufferedStream(error);
            }
            pro.waitFor();
            if (!errorInfo.isEmpty()) {
                log.error("exec [{}] error: {}", display, errorInfo);
            } else {
                log.info("exec [{}] response: {}", display, result.trim());
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            log.error("exec [{}] ex:", display, e);
        } catch (IOException e) {
            // Was e.printStackTrace(); route through the logger like every other path.
            log.error("exec [{}] io ex:", display, e);
        } finally {
            if (pro != null) {
                pro.destroy();
            }
        }
    }

}