package com.feidee.data.report.util.SparkMonitor;

import ch.ethz.ssh2.Connection;
import com.feidee.data.report.constant.Constant;
import com.feidee.data.report.util.ToolUtils;
import com.feidee.data.report.util.mail.RMailSender;
import com.feidee.fdfalcon.falcon.FalconAlarmService;
import io.netty.util.HashedWheelTimer;
import io.netty.util.Timeout;
import io.netty.util.Timer;
import io.netty.util.TimerTask;
import org.apache.log4j.Logger;

import java.sql.SQLException;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * 监控SQL运行时间，如果spark SQL运行超时则重启环境，如果hive运行超时则关闭连接
 */
/**
 * Monitors SQL execution time. If a Spark SQL statement exceeds the timeout the
 * Spark Thrift environment is restarted; if a Hive SQL statement exceeds the
 * timeout its JDBC connection is closed.
 *
 * <p>All timeouts are scheduled on a single shared netty {@link HashedWheelTimer},
 * so the timeout callbacks run on the timer's worker thread, not the caller's.
 */
public final class CheckSqlStatus {

    private static final Logger logger = Logger.getLogger(CheckSqlStatus.class);
    // Shared wheel timer for all monitor tasks; created once for the JVM lifetime.
    private static final Timer timer = new HashedWheelTimer();

    /** Utility class — not instantiable. */
    private CheckSqlStatus() {
    }

    /**
     * Schedules a one-shot check that fires after {@code Constant.SPARK_THRIFT_OVERTIME}
     * minutes. If {@code runningKey} is still registered as running for the given
     * connection at that point, the SQL is considered timed out and the corresponding
     * Spark Thrift service is restarted.
     *
     * @param curSparkConNum index of the Spark connection being monitored
     * @param runningKey     key identifying the SQL currently running on that connection
     */
    public static void startSparkMonitor(final int curSparkConNum, final String runningKey) {
        timer.newTimeout(new TimerTask() {
            @Override
            public void run(Timeout timeout) throws Exception {
                // NOTE(review): contains() followed by clear() is not atomic — if the
                // underlying collection is mutated concurrently another monitor could
                // race here; confirm RUNNING_SQL's collection is thread-safe.
                if (Constant.RUNNING_SQL.get(curSparkConNum).contains(runningKey)) {
                    // Clear the running flags so concurrent monitors do not restart
                    // the service a second time.
                    Constant.RUNNING_SQL.get(curSparkConNum).clear();
                    logger.info("正在运行的SQL: " + runningKey + " 已超时！");
                    rebootThrift(curSparkConNum);
                }
            }
        }, Constant.SPARK_THRIFT_OVERTIME, TimeUnit.MINUTES);
    }

    /**
     * Restarts the Spark Thrift service for the given connection index by logging
     * in over SSH and executing the configured restart command, then reports the
     * outcome via the Falcon alarm service.
     *
     * @param curSparkConNum index into {@code Constant.SPARK_THRIFT} holding the
     *                       host/username/password/cmd configuration
     */
    private static void rebootThrift(int curSparkConNum) {
        Map<String, String> value = Constant.SPARK_THRIFT.get(curSparkConNum);
        String host = value.get("host");
        String username = value.get("username");
        String password = value.get("password");
        String cmd = value.get("cmd");
        Connection login = null;
        try {
            logger.info("开始重启服务…………");
            login = RemoteCommandUtil.login(host, username, password);
            RemoteCommandUtil.execute(login, cmd);
            logger.info("Spark Thrift 服务: " + host + " 重启成功！");
            logger.info("重启服务完成！");
            FalconAlarmService.sendAlarm("Spark Thrift 服务: " + host + " 于 " + ToolUtils.getTimeStr()
                    + " 重启成功！", "menhu");
        } catch (Exception e) {
            // Log the cause as well — the original message alone gives no stack trace.
            logger.error("Spark Thrift 服务: " + host + " 重启失败！", e);
            FalconAlarmService.sendAlarm("Spark Thrift 服务: " + host + " 于 " + ToolUtils.getTimeStr()
                    + " 重启失败！", "menhu");
        } finally {
            // Always release the SSH connection; previously it leaked on every call.
            if (login != null) {
                login.close();
            }
        }
    }

    /**
     * Schedules a one-shot timeout for a Hive SQL execution. After
     * {@code Constant.HIVE_RUN_OVERTIME} minutes, if the JDBC connection is still
     * open the SQL is considered timed out: the connection is closed and the
     * responsible handler is notified by mail. Reports whitelisted in
     * {@code Constant.ALLOW_OVERTIME_SQL} are exempt and no monitor is started.
     *
     * @param reportKey   report identifier
     * @param reportIndex index of the SQL within the report
     * @param conn        the JDBC connection executing the Hive SQL
     * @throws Exception never thrown directly here; declared for caller compatibility.
     *                   The Exception raised inside the timeout callback propagates
     *                   into the netty timer worker thread, not to this caller.
     */
    public static void startHiveMonitor(final String reportKey, final String reportIndex, final java.sql.Connection
            conn) throws Exception {

        if (Constant.ALLOW_OVERTIME_SQL.containsKey(reportKey) && Constant.ALLOW_OVERTIME_SQL.get(reportKey)
                .contains(reportIndex)) {
            logger.info("报表: " + reportKey + "_" + reportIndex + " 允许超时执行，无需启动 Hive SQL 超时监控!");
        } else {
            logger.info("报表: " + reportKey + "_" + reportIndex + " 启动 Hive SQL 超时监控!");
            timer.newTimeout(timeout -> {
                if (!conn.isClosed()) {
                    logger.info("正在运行的SQL: " + reportKey + "_" + reportIndex + " 已超时！");
                    conn.close();
                    logger.info("正在运行的SQL: " + reportKey + "_" + reportIndex + " 关闭连接成功！");
                    RMailSender.sendOvertimeSqlToHandler(reportKey, reportIndex);
                    // Raised on the timer worker thread to mark the task as failed;
                    // HashedWheelTimer swallows/logs it — it never reaches callers.
                    throw new Exception("Hive SQL运行超时!");
                }
            }, Constant.HIVE_RUN_OVERTIME, TimeUnit.MINUTES);
        }
    }
}
