package com.sh.data.engine.domain.datadev.flinkTaskAboutExexte;

import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import com.sh.data.engine.common.enumDefinition.FlinkJobStatusEnum;
import com.sh.data.engine.common.enumDefinition.FlinkNodeTypeEnum;
import com.sh.data.engine.common.exception.BusinessException;
import com.sh.data.engine.common.util.FlinkUtil;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkJobDomain;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkResourceDomain;
import com.sh.data.engine.domain.datadev.flink.model.domain.FlinkTaskDomain;
import com.sh.data.engine.domain.datadev.flink.model.param.StartFlinkTaskParam;
import com.sh.data.engine.domain.datadev.flink.service.FlinkJobService;
import com.sh.data.engine.domain.datadev.flink.service.FlinkResourceService;
import com.sh.data.engine.domain.datadev.flink.service.FlinkTaskService;
import com.sh.data.engine.domain.util.LogUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.lang3.StringUtils;
import org.buildobjects.process.TimeoutException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Properties;

@Service
@Slf4j
public class FlinkAppExcService {

    /** Timestamp pattern used for lines appended to the per-node flink log. */
    private static final String LOG_TIMESTAMP_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS";

    @Lazy
    @Autowired
    private FlinkTaskService flinkTaskService;

    @Lazy
    @Autowired
    private FlinkResourceService flinkResourceService;

    @Lazy
    @Autowired
    private FlinkJobService flinkJobService;

    @Lazy
    @Autowired
    private FlinkTaskApiService flinkTaskApiService;

    /** Directory that holds the per-node flink submission logs. */
    @Value("${flink.log.path:/data/hufu_file_storage/log/flink}")
    private String flinkLogDir;

    /**
     * Starts (submits) the active Flink JAR task bound to the given node.
     *
     * <p>If a job for this node is already RUNNING, the existing job id is returned and nothing
     * else happens. Otherwise a new job row is created immediately and the actual submission to
     * Flink runs on a background thread; this method therefore returns the engine job id before
     * the cluster has accepted the job. Submission progress and failures are appended to the
     * node's flink log via {@link LogUtil#writeFlinkLog}.
     *
     * @param startFlinkTaskParam carries the node id, exec type and optional savepoint path
     * @return the engine job id (either the already-running job's id or the newly created one)
     * @throws IllegalArgumentException if the param carries a null node id
     * @throws BusinessException if no active task exists for the node
     */
    public Long clientJobStart(StartFlinkTaskParam startFlinkTaskParam) {
        Long nodeId = startFlinkTaskParam.getNodeId();
        // Validate before first use: nodeId feeds the log file name and the job lookup below,
        // so the guard must come first (previously it ran only after both uses).
        if (nodeId == null) {
            throw new IllegalArgumentException("nodeId can't be null");
        }
        final Integer execType = startFlinkTaskParam.getExecType();
        String savePoint = startFlinkTaskParam.getSavePoint();

        // NOTE(review): the "_1" suffix mirrors the hard-coded triggerType passed to addJob
        // below — presumably "manual start"; confirm against FlinkJobService.addJob.
        String logFileName = String.format("%s_%s", nodeId, 1);
        String logFilePath = String.format("%s/%s.log", flinkLogDir, logFileName);

        // Idempotency: a job already RUNNING for this node is simply reported back.
        final FlinkJobDomain flinkJobDomain = flinkJobService.getByNodeId(nodeId, execType);
        if (null != flinkJobDomain
            && Objects.equals(flinkJobDomain.getStatus(), FlinkJobStatusEnum.RUNNING.getCode())) {
            return flinkJobDomain.getId();
        }

        // 先删除旧日志 (delete the previous submission log so this run starts clean)
        FileUtils.deleteQuietly(new File(logFilePath));

        final FlinkTaskDomain taskDomain = flinkTaskService.getActiveByNodeId(nodeId);
        if (null == taskDomain) {
            throw new BusinessException("active task don't exist");
        }

        // Persist the job row up front so the id can be returned to the caller while the
        // submission itself proceeds asynchronously.
        final Long jobId =
            flinkJobService.addJob(
                taskDomain.getProjectId(),
                nodeId,
                taskDomain.getId(),
                FlinkNodeTypeEnum.TASK_JAR.getCode(),
                1,
                taskDomain.getNextExecTime());

        try {
            final FlinkResourceDomain resourceDomain =
                flinkResourceService.getActiveByNodeId(taskDomain.getResourceNodeId());

            if (null == resourceDomain) {
                throw new IllegalArgumentException("resource doesn't exist");
            }

            final String mainClass = taskDomain.getMainClass();
            final String jarPath = resourceDomain.getPath();
            final String envConfig = taskDomain.getEnvConfig();
            List<String> logList = Lists.newArrayList();
            logList.add("调度中..");
            logList.add("JAR_PATH:" + jarPath);
            logList.add("MAINCLASS:" + mainClass);
            logList.add("jobId:" + jobId);
            logList.add("nodeId:" + nodeId);
            logList.add("logFileName:" + logFileName);
            LogUtil.writeFlinkLog(nodeId, logList);
            // Submission happens off the request thread; a named thread helps thread dumps.
            Thread thread =
                new Thread(
                    () -> {
                        try {
                            final FlinkUtil.StartJobParam startJobParam = new FlinkUtil.StartJobParam();
                            startJobParam.setEngineJobId(jobId);
                            startJobParam.setMainClass(mainClass);
                            startJobParam.setJar(jarPath);
                            startJobParam.setSavePoint(savePoint);

                            Properties properties = FlinkUtil.getDefaultProperties();

                            // Task-level env config is java.util.Properties text; it overlays the
                            // engine defaults loaded above.
                            if (StringUtils.isNotBlank(envConfig)) {
                                properties.load(new StringReader(envConfig));
                                startJobParam.setExtProperties(properties);
                            }

                            final FlinkUtil.FlinkJobResult flinkJobResult = FlinkUtil.startJob(startJobParam);
                            logList.add("flinkJobResult(raw):" + JSON.toJSONString(flinkJobResult));
                            // NOTE(review): logList is flushed here AND in the finally block —
                            // lines may be written twice if writeFlinkLog appends; confirm
                            // LogUtil semantics before changing.
                            LogUtil.writeFlinkLog(nodeId, logList);
                            flinkJobService.updateJob(
                                flinkJobResult.getEngineJobId(),
                                flinkJobResult.getFlinkJobIds(),
                                flinkJobResult.getYarnApplicationIds());
                            if (CollectionUtils.isNotEmpty(flinkJobResult.getFlinkJobIds())) {
                                logList.addAll(flinkJobResult.getLogList());
                                logList.add("flinkJobResult:" + JSON.toJSONString(flinkJobResult));
                                logList.add("任务已提交到flink..");
                            }
                        } catch (Exception e) {
                            log.error(e.getMessage(), e);

                            if (e instanceof TimeoutException) {
                                // Deployment timed out waiting for YARN resources: pause (not
                                // fail) so the task can be retried once capacity frees up.
                                logList.add(
                                    String.format(
                                        "[%s] Deployment took more than 60 seconds. Please check if the requested resources are available in the YARN cluster",
                                        DateFormatUtils.format(new Date(), LOG_TIMESTAMP_PATTERN)));
                                flinkTaskApiService.stopFlinkTask(
                                    jobId, FlinkJobStatusEnum.PAUSED.getCode(), false);
                            } else {
                                flinkTaskApiService.stopFlinkTask(
                                    jobId, FlinkJobStatusEnum.FAILURE.getCode(), false);
                                logList.add(
                                    String.format(
                                        "[%s] %s",
                                        DateFormatUtils.format(new Date(), LOG_TIMESTAMP_PATTERN),
                                        ExceptionUtils.getFullStackTrace(e)));
                            }
                        } finally {
                            // Best-effort flush of whatever was collected, success or failure.
                            try {
                                LogUtil.writeFlinkLog(nodeId, logList);
                            } catch (IOException ex) {
                                log.error(ex.getMessage(), ex);
                            }
                        }
                    },
                    "flink-job-start-" + jobId);
            thread.start();

        } catch (Exception e) {
            // Synchronous failures (missing resource, log write error) mark the job FAILED;
            // the caller still receives the job id so the failure is visible in job status.
            flinkTaskApiService.stopFlinkTask(jobId, FlinkJobStatusEnum.FAILURE.getCode(), false);
            log.error(e.getMessage(), e);
        }

        return jobId;
    }
}
