package com.sui.bigdata.rtcadmin.service.impl;


import com.sui.bigdata.rtcadmin.constant.AppConstant;
import com.sui.bigdata.rtcadmin.constant.JobConstant;
import com.sui.bigdata.rtcadmin.exception.SparkApplicationException;
import com.sui.bigdata.rtcadmin.exception.SparkConfException;
import com.sui.bigdata.rtcadmin.model.dto.SparkAppDto;
import com.sui.bigdata.rtcadmin.model.vo.CommonResponse;
import com.sui.bigdata.rtcadmin.repository.mapper.AppConfMapper;
import com.sui.bigdata.rtcadmin.repository.mapper.JobStatusMapper;
import com.sui.bigdata.rtcadmin.repository.model.SparkApp;
import com.sui.bigdata.rtcadmin.service.SparkAppService;
import com.sui.bigdata.rtcadmin.util.SparkArgumentsUtil;
import com.sui.bigdata.rtcadmin.util.YarnUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import scala.App;


/**
 * Spark application lifecycle service: submit, restart, stop and status query
 * against YARN, persisting application metadata through MyBatis mappers.
 *
 * @Author: TingWuHuang
 * @Date: 2020/5/6 15:30
 */
@Service
public class SparkAppServiceImpl implements SparkAppService {

    // SLF4J convention: one static final logger per class.
    private static final Logger logger = LoggerFactory.getLogger(SparkAppServiceImpl.class);

    @Autowired
    private AppConfMapper appConfMapper;

    @Autowired
    private JobStatusMapper jobStatusMapper;

    @Autowired
    private YarnUtil yarnUtil;

    /**
     * Dispatches a lifecycle action (submit / restart / stop / status) to the
     * matching handler and wraps its result in a success response.
     *
     * @param action   one of the AppConstant.APP_ACTION_* values
     * @param sparkApp the application descriptor the action operates on
     * @return a success CommonResponse carrying the handler's result
     * @throws SparkApplicationException if the action is not recognized or the handler fails
     */
    @Override
    public CommonResponse action(String action, SparkAppDto sparkApp) throws SparkApplicationException {
        switch (action) {
            case AppConstant.APP_ACTION_SUBMIT:
                return CommonResponse.Builder.buildSuccess(applicationSubmit(sparkApp));
            case AppConstant.APP_ACTION_RESTART:
                return CommonResponse.Builder.buildSuccess(applicationRestart(sparkApp));
            case AppConstant.APP_ACTION_STOP:
                return CommonResponse.Builder.buildSuccess(applicationStop(sparkApp.getAppName()));
            case AppConstant.APP_ACTION_STATUS:
                return CommonResponse.Builder.buildSuccess(yarnUtil.applicationsStatus(sparkApp.getAppId()));
            default:
                throw new SparkApplicationException("action not supported!");
        }
    }

    /**
     * Validates the submission request, rejects it if an application with the
     * same name is already running or accepted on YARN, then submits the
     * application and persists its metadata.
     *
     * @param sparkAppDto the application to submit; appName, developer,
     *                    appResource and mainClass are mandatory
     * @return the same DTO that was submitted
     * @throws SparkApplicationException if the YARN submission fails
     * @throws SparkConfException       if validation fails or the app is already running
     */
    public SparkAppDto applicationSubmit(SparkAppDto sparkAppDto) throws SparkApplicationException {
        // Input validation.
        if (StringUtils.isEmpty(sparkAppDto.getAppName())) {
            throw new SparkConfException("appName value is invalid !");
        }
        if (StringUtils.isEmpty(sparkAppDto.getDeveloper())) {
            throw new SparkConfException("developer value is invalid !");
        }
        if (StringUtils.isEmpty(sparkAppDto.getAppResource()) || StringUtils.isEmpty(sparkAppDto.getMainClass())) {
            throw new SparkConfException("spark appResource or mainClass value is invalid !");
        }
        // Refuse to resubmit while a task with the same appName is still active.
        SparkApp curSparkApp = appConfMapper.selectByAppName(sparkAppDto.getAppName());
        if (curSparkApp != null) {
            if (AppConstant.APP_STATUS_RUNNING.equals(curSparkApp.getAppStatus()) ||
                    AppConstant.APP_STATUS_ACCEPT.equals(curSparkApp.getAppStatus())) {
                throw new SparkConfException(sparkAppDto.getAppName() + " is running,update is not support!");
            }
        }

        yarnUtil.luncherSubmit(sparkAppDto);
        // Persist application metadata.
        // NOTE(review): unlike applicationRestart, no status (e.g. APP_STATUS_ACCEPT)
        // is set on the DTO before persisting — confirm whether that is intentional.
        updateAppConfig(sparkAppDto);

        return sparkAppDto;
    }

    /**
     * Stops a running application on YARN and marks it CANCELED in both the
     * app-config and job-status tables.
     *
     * @param appName name of the application to stop; must reference a RUNNING app
     * @return {@code true} when the stop request and status updates completed
     * @throws SparkApplicationException if appName is blank or the app is not running
     */
    public boolean applicationStop(String appName) throws SparkApplicationException {
        if (StringUtils.isEmpty(appName)) {
            throw new SparkApplicationException("appName value is invalid !");
        }
        SparkApp sparkApp = appConfMapper.selectByAppName(appName);
        if (sparkApp == null || !AppConstant.APP_STATUS_RUNNING.equals(sparkApp.getAppStatus())) {
            throw new SparkApplicationException(appName + " is not running");
        }
        yarnUtil.applicationStop(sparkApp.getAppId());
        appConfMapper.updateStatus(appName, AppConstant.APP_STATUS_CANCELED);
        jobStatusMapper.updateStatus(appName, sparkApp.getAppId(), AppConstant.APP_STATUS_CANCELED);
        return true;
    }

    /**
     * Restarts an existing application: submits a new instance built from the
     * stored configuration merged with the incoming overrides, then makes a
     * best-effort attempt to kill the previous YARN application.
     *
     * @param sparkAppDto restart request; appName is mandatory and must exist
     * @return the DTO describing the newly submitted instance (status ACCEPT)
     * @throws SparkApplicationException if appName is blank, unknown, or the submit fails
     */
    @Override
    public SparkAppDto applicationRestart(SparkAppDto sparkAppDto) throws SparkApplicationException {
        if (StringUtils.isEmpty(sparkAppDto.getAppName())) {
            throw new SparkApplicationException("parameters of  appName value is invalid !");
        }
        SparkApp oldSparkApp = appConfMapper.selectByAppName(sparkAppDto.getAppName());
        if (oldSparkApp == null) {
            throw new SparkApplicationException(sparkAppDto.getAppName() + " is not exist!");
        }
        String oldSparkAppId = oldSparkApp.getAppId();
        SparkAppDto newSparkApp = SparkArgumentsUtil.buildRestartConf(sparkAppDto, oldSparkApp);
        yarnUtil.luncherSubmit(newSparkApp);
        try {
            // Best-effort kill of the previous instance; failure is tolerated
            // because it may already be finished or killed.
            if (oldSparkAppId != null) {
                yarnUtil.applicationStop(oldSparkAppId);
            }
        } catch (Exception e) {
            // Pass the exception as the last SLF4J argument so the stack trace is logged.
            logger.warn("kill {} failed ,maybe has been killed or finish", oldSparkAppId, e);
        }
        newSparkApp.setAppStatus(AppConstant.APP_STATUS_ACCEPT);
        updateAppConfig(newSparkApp);

        return newSparkApp;
    }

    /**
     * Persists the application's configuration row and records the submitted
     * job in the job-status table.
     *
     * @param sparkAppDto the application whose metadata is saved
     */
    public void updateAppConfig(SparkAppDto sparkAppDto) {
        appConfMapper.saveAll(SparkApp.buildSparkApp(sparkAppDto));
        jobStatusMapper.updateSubmitedJob(sparkAppDto.getAppName(), sparkAppDto.getDeveloper(), sparkAppDto.getAppId(),
                "", sparkAppDto.getAppStatus(), JobConstant.ENGINE_SPARK);
    }

}
