package com.sui.bigdata.rtcadmin.util;


import com.sui.bigdata.flink.table.client.JobClient;
import com.sui.bigdata.rtcadmin.async.SparkAppAlarm;
import com.sui.bigdata.rtcadmin.constant.AppConstant;
import com.sui.bigdata.rtcadmin.model.dto.SparkAppDto;
import com.sui.bigdata.rtcadmin.repository.mapper.AppConfMapper;
import com.sui.bigdata.rtcadmin.repository.model.SparkApp;
import com.sui.bigdata.rtcadmin.service.SparkAppService;
import com.sui.bigdata.rtcadmin.timing.Crontab;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.util.List;

import static com.sui.bigdata.rtcadmin.util.SendMsgUtils.sendYarnDisconnectMsg;

/**
 * @Author: TingWuHuang
 * @Date: 2020/5/6 15:30
 * @description Scans YARN for the current state of Spark applications, updates
 * their recorded status, and raises alarm / restart notifications on failure.
 */
@Component
public class SparkAlarmUtil {

    Logger logger = LoggerFactory.getLogger(SparkAlarmUtil.class);

    private static String DISCONNECT = "DISCONNECT";

    @Value("${alarm.msg.admin.url}")
    private  String alarmMsgAdminUrl;

    @Autowired
    private  RedisUtils redisUtils;

    @Autowired
    SparkAppService sparkAppService;
    @Autowired
    AppConfMapper appConfMapper;


    public void scanApplicationStatus(List<SparkApp> sparkApps, String yarnConfPath, SparkAppAlarm sparkAppAlarm){

        for (SparkApp sparkApp : sparkApps){
            if (StringUtils.isEmpty(sparkApp.getAppId())){
                continue;
            }

            YarnApplicationState yarnStatus = null;
            try {
                try {
                    yarnStatus = (YarnApplicationState)JobClient.getYarnStatus(sparkApp.getAppId(), yarnConfPath).get(0);
                }catch (ApplicationNotFoundException e1){
                    yarnStatus =  YarnApplicationState.KILLED;
                    logger.error("spark " + "：" + sparkApp.getAppId() + " not found exception .");
                }catch (YarnException e2){
                    sparkApp.setAppStatus(DISCONNECT);
                    if(redisUtils.setScheduler("yarnDisconnect","yarnDisconnect",24*60*60L)){
                        sendYarnDisconnectMsg(alarmMsgAdminUrl);
                    }
                    logger.error(" yarn disconnect: ",e2);
                    continue;
                }
                switch (yarnStatus){
                    case KILLED:
                        String curSparkStatus = appConfMapper.selectByAppName(sparkApp.getAppName()).getAppStatus();
                        if(!AppConstant.APP_STATUS_CANCELED.equals(curSparkStatus)){
                            Crontab.threadPool.execute(SparkAppAlarm.build(sparkAppAlarm, sparkApp, yarnStatus.toString()));
                            sparkApp.setAppStatus(AppConstant.APP_STATUS_KILLED);
                        }
                        break;
                    case FAILED:
                        Crontab.threadPool.execute(SparkAppAlarm.build(sparkAppAlarm, sparkApp, yarnStatus.toString()));
                        sparkApp.setAppStatus(AppConstant.APP_STATUS_FAILED);
                        break;
                    case FINISHED:
                        sparkApp.setAppStatus(AppConstant.APP_STATUS_FINISH);
                        if(AppConstant.SPARK_APPLICATION_STREAMING.equals(sparkApp.getAppStyle())){
                            Crontab.threadPool.execute(SparkAppAlarm.build(sparkAppAlarm, sparkApp, yarnStatus.toString()));
                        }
                        break;
                    case RUNNING:
                        sparkApp.setAppStatus(AppConstant.APP_STATUS_RUNNING);
                        break;
                    case ACCEPTED:
                        sparkApp.setAppStatus(AppConstant.APP_STATUS_ACCEPT);
                        break;
                    default:
                }
            } catch (Exception e) {
                logger.error("use yarnClient to get ApplicationReport error:{}", e);
            }
        }
    }

    public void restartJob(String jobName, String alarmUrl, SparkApp sparkApp, String trackingUrl, String jobStatus) {
        try {
            for (int i = 0; i < 3; i++) {
                try {
                    logger.warn(" {} retry restart at {} time.", jobName, i + 1);
                    sparkAppService.applicationRestart(SparkAppDto.buildSparkAppDto(sparkApp));
                    SendMsgUtils.sendSparkAppRestartSuccessMsg(alarmUrl, jobName, jobStatus,sparkApp.getAppId());
                    break;
                } catch (Exception e) {
                    if (i == 2) {
                        throw e;
                    }
                    Thread.sleep(2000);
                }
            }
        } catch (Exception e) {
            SendMsgUtils.sendSparkAppRestartFailMsg(alarmUrl, jobName, sparkApp.getDeveloper(), jobStatus, trackingUrl);
            logger.error(" {} change failed to restart fail after 3 times.", jobName,e);
        }
    }



}
