package com.sui.bigdata.rtcadmin.util;

import com.sui.bigdata.flink.table.client.JobClient;
import com.sui.bigdata.flink.table.client.SqlClient;
import com.sui.bigdata.rtcadmin.constant.JobConstant;
import com.sui.bigdata.rtcadmin.dto.JobConfigDto;
import com.sui.bigdata.rtcadmin.exception.*;
import com.sui.bigdata.rtcadmin.repository.mapper.JobConfigMapper;
import com.sui.bigdata.rtcadmin.repository.mapper.JobStatusMapper;
import com.sui.bigdata.rtcadmin.repository.model.JobConfig;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.table.client.gateway.SqlExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.util.*;
import java.util.stream.Collectors;


/**
 * @author YongChen
 * @date 2019/12/2 11:11
 * @description
 * @email yong_chen@sui.com
 */
@Component
public class JobActionUtils {
    /** SLF4J convention: loggers are private static final (was a mutable instance field). */
    private static final Logger logger = LoggerFactory.getLogger(JobActionUtils.class);

    @Autowired
    private JobConfigMapper jobConfigMapper;
    @Autowired
    private JobStatusMapper jobStatusMapper;


    @Autowired
    private RedisUtils redisUtils;

    @Value("${flink.install.path}")
    private String flinkIntallPath;
    @Value("${flink.udf.jar.path}")
    private String flinkUdfJarPath;
    @Value("${flink.core.jar.path}")
    private String coreJarPath;
    @Value("${yarn.conf.path}")
    private String yarnConfPath;
    @Value("${legacy.job}")
    private String legacyJob;


    /**
     * Submits a new Flink SQL job to the cluster.
     *
     * <p>Takes a short-lived Redis lock to prevent duplicate operations, validates the
     * mandatory fields, rejects jobs that are already ACCEPT/RUNNING/INITIALIZING or
     * flagged as legacy, then delegates to {@link JobClient#submitTask} and persists
     * the submission result.
     *
     * @param jobConfigDto job definition (SQL, name, developer, resource config)
     * @throws ExecSqlBlankException   if the SQL text is blank
     * @throws JobNameBlankException   if the job name is blank
     * @throws DeveloperBlankException if the developer is blank
     * @throws FlinkCommonException    if the job is already in a live state
     * @throws JobLegacyException      if the job is on the legacy blacklist
     * @throws JobSubmitFailException  if the underlying submit call fails
     */
    public void submit(JobConfigDto jobConfigDto) throws Exception {
        reOperation(jobConfigDto);

        if (StringUtils.isBlank(jobConfigDto.getExecSql())) {
            throw new ExecSqlBlankException();
        }
        if (StringUtils.isBlank(jobConfigDto.getJobName())) {
            throw new JobNameBlankException();
        }
        if (StringUtils.isBlank(jobConfigDto.getDeveloper())) {
            throw new DeveloperBlankException();
        }

        // Refuse to resubmit a job that is still alive on the cluster.
        String jobStatus = jobConfigMapper.queryStatusByJobName(jobConfigDto.getJobName());
        if (JobConstant.ACCEPT.equals(jobStatus) || JobConstant.RUNNING.equals(jobStatus) || JobConstant.INITIALIZING.equals(jobStatus)) {
            throw new FlinkCommonException(jobConfigDto.getJobName() + " is "+jobStatus+" ,Don't resubmit");
        }
        if (getLegacyJobs().contains(jobConfigDto.getJobName())){
            throw new JobLegacyException(jobConfigDto.getJobName());
        }

        // Merge the cluster-wide flink-conf.yaml with the per-job configuration.
        Configuration flinkConfig = GlobalConfiguration.loadConfiguration(flinkIntallPath + "/conf");
        Map<String, String> confPropUtil = ConfUtils.confPropUtil(jobConfigDto, flinkConfig);
        Map<String, String> taskResult = null;
        try {
            taskResult = JobClient.submitTask(jobConfigDto.getExecSql(), jobConfigDto.getJobName(), confPropUtil, null);
        }catch (Exception e){
            logger.error("job submit excetion!", e);
            throw new JobSubmitFailException(e.getMessage());
        }
        saveSubmitedRes(jobConfigDto, taskResult);
    }


    /**
     * Stops a running Flink job and marks it CANCELED/offline in the database.
     *
     * @param jobConfigDto carries the name of the job to stop
     * @throws JobNotRunningException      if the job is unknown or not in a live state
     * @throws JobLegacyException          if the job is on the legacy blacklist
     * @throws FlinkJobOperationException  if the underlying stop call fails
     */
    public void stop(JobConfigDto jobConfigDto) {
        reOperation(jobConfigDto);
        String jobName = jobConfigDto.getJobName();
        JobConfig jobConfig = jobConfigMapper.queryJobIdByJobName(jobName);
        String status = jobConfigMapper.queryStatusByJobName(jobName);
        // Only jobs in one of these live states can be stopped.
        List<String> runningStatuses = Arrays.asList(
                JobConstant.RUNNING, JobConstant.INITIALIZING, JobConstant.ACCEPT);
        if (null == jobConfig || !runningStatuses.contains(status)) {
            throw new JobNotRunningException(jobName);
        }
        if (getLegacyJobs().contains(jobName)){
            throw new JobLegacyException(jobName);
        }
        try {
            JobClient.stop(jobConfig.getJobId(), jobConfig.getAppId(),
                    flinkIntallPath,
                    yarnConfPath);
        }catch (Exception e){
            logger.error("job stop excetion! jobName:{},appId:{},jobId:{}",jobName,
                    jobConfig.getAppId(),jobConfig.getJobId(), e);
            throw new FlinkJobOperationException(e.getMessage());
        }


        // Persist the new state in both the config and status tables.
        jobConfigMapper.updateStatusByJobName(jobConfigDto.getJobName(), JobConstant.CANCELED, JobConstant.JOB_ONT_ONLINE);
        jobStatusMapper.updateStatus(jobConfigDto.getJobName(),jobConfig.getAppId(), JobConstant.CANCELED);

    }

    /**
     * Restarts an existing Flink job, optionally from a savepoint.
     *
     * <p>Savepoint restore is automatically disabled when the job is not currently
     * ACCEPT/RUNNING, since there is no live state to snapshot.
     *
     * @param jobConfigDto carries the job name and restart options
     * @throws JobNameBlankException    if the job name is blank
     * @throws JobNotExistException     if the job is unknown
     * @throws JobLegacyException       if the job is on the legacy blacklist
     * @throws JobRestartFailException  if the underlying restart call fails
     */
    public void restart(JobConfigDto jobConfigDto) throws Exception {
        reOperation(jobConfigDto);

        if (StringUtils.isBlank(jobConfigDto.getJobName())) {
            throw new JobNameBlankException();
        }

        JobConfig jobConfig = jobConfigMapper.queryByJobName(jobConfigDto.getJobName());
        if (null == jobConfig) {
            throw new JobNotExistException(jobConfigDto.getJobName());
        }
        if (getLegacyJobs().contains(jobConfigDto.getJobName())){
            throw new JobLegacyException(jobConfigDto.getJobName());
        }

        // Fill the DTO with the persisted config of the previous run.
        BasicPropUtils.buildRestartCofing(jobConfigDto, jobConfig);

        Configuration flinkConfig = GlobalConfiguration.loadConfiguration(flinkIntallPath + "/conf");
        Map<String, String> confPropUtil = ConfUtils.confPropUtil(jobConfigDto, flinkConfig);

        String jobStatus = jobConfigMapper.queryStatusByJobName(jobConfigDto.getJobName());

        // A dead job has no live state to savepoint from — force a plain restart.
        if (!JobConstant.ACCEPT.equals(jobStatus) && !JobConstant.RUNNING.equals(jobStatus)) {
            jobConfigDto.setSavepoint(false);
        }
        Map<String, String> taskResult = null;
        try {
            taskResult = JobClient.restart(jobConfig.getJobId(), jobConfig.getAppId(),
                    jobConfigDto.getExecSql(), jobConfigDto.getJobName(), flinkIntallPath, yarnConfPath,
                    confPropUtil, jobConfigDto.getSavepoint(), jobConfig.getStatus());
        }catch (Exception e){
            logger.error("任务重启异常！任务名：{}，appId：{}，jobId：{}", jobConfig.getJobName(), jobConfig.getAppId(), jobConfig.getJobId(), e);
            throw new JobRestartFailException(e.getMessage());
        }
        saveSubmitedRes(jobConfigDto, taskResult);
    }


    /**
     * Restarts every non-legacy job created within the given time window,
     * pausing 10s between restarts to avoid flooding YARN.
     *
     * <p>Individual restart failures are collected and reported at the end;
     * an interruption of the sleeping thread stops the batch and restores the
     * interrupt flag (previously it was silently swallowed by a broad catch).
     *
     * @param jobConfigDto carries the start/end time window
     * @throws FlinkCommonException listing the jobs whose restart failed
     */
    public void restartAll(JobConfigDto jobConfigDto) throws Exception {
        List<JobConfig> jobConfigs = jobConfigMapper.queryJobByTime(jobConfigDto.getStartTime(),jobConfigDto.getEndTime());
        List<String> exceptionJobs = new ArrayList<>();
        for (JobConfig jobConfig : jobConfigs) {
            try {
                // 不执行历史遗留的任务
                if (getLegacyJobs().contains(jobConfig.getJobName())) {
                    continue;
                }
                restart(JobConfigDto.buildJobConfigDto(jobConfig).setSavepoint(true));
                Thread.sleep(10000);
            } catch (JobRestartFailException e) {
                exceptionJobs.add(jobConfig.getJobName());
            } catch (InterruptedException e) {
                // Preserve the interrupt status and abort the remaining batch.
                Thread.currentThread().interrupt();
                logger.error("批量重启任务异常！任务名：{}", jobConfig.getJobName(), e);
                break;
            } catch (Exception e) {
                logger.error("批量重启任务异常！任务名：{}", jobConfig.getJobName(), e);
            }
        }

        if (!exceptionJobs.isEmpty()){
            throw new FlinkCommonException(" restart exception job is : "+exceptionJobs.toString());
        }
    }

    /**
     * Persists the result of a successful submit/restart (appId, jobId, web URL)
     * into the config and status tables and marks the job online with ACCEPT state.
     */
    private void saveSubmitedRes(JobConfigDto jobConfigDto, Map<String, String> taskResult) {
        jobConfigMapper.saveAll(JobConfig.buildJobConfig(jobConfigDto));
        jobConfigMapper.updateSubmitedByJobName(jobConfigDto.getJobName(), taskResult.get(JobConstant.APP_ID),
                taskResult.get(JobConstant.JOB_ID), taskResult.get(JobConstant.WEB_URL), JobConstant.ACCEPT, JobConstant.JOB_IS_ONLINE);
        jobStatusMapper.updateSubmitedJob(jobConfigDto.getJobName(),jobConfigDto.getDeveloper(), taskResult.get(JobConstant.APP_ID),
                taskResult.get(JobConstant.JOB_ID), JobConstant.ACCEPT, JobConstant.ENGINE_FLINK);
    }

    /**
     * Guards against concurrent operations on the same job by taking a
     * 10-second Redis lock keyed on the job name.
     *
     * @throws ReOperationException if the lock is already held (another
     *         operation on this job is in flight)
     */
    private void reOperation(JobConfigDto jobConfigDto) {
        String lockKey = jobConfigDto.getJobName() + "_op";
        String lockValue = UUID.randomUUID().toString();

        if (!redisUtils.setScheduler(lockKey, lockValue,10)) {
            throw new ReOperationException(jobConfigDto.getJobName());
        }
    }

    /**
     * Parses the comma-separated {@code legacy.job} property into a set of
     * job names that must never be (re)submitted or stopped by this service.
     * Returns an empty set when the property is absent or blank (previously
     * this threw a NullPointerException).
     */
    private Set<String> getLegacyJobs(){
        if (StringUtils.isBlank(legacyJob)) {
            return Collections.emptySet();
        }
        return Arrays.stream(legacyJob.split(",")).collect(Collectors.toSet());
    }

}
