package cn.ac.iscas.service.executor.flink;

import cn.ac.iscas.base.utils.SpringUtil;
import cn.ac.iscas.base.utils.SshUtils;
import cn.ac.iscas.config.FlinkConfig;

import cn.ac.iscas.dao.IJobMetadataDao;
import cn.ac.iscas.dao.entity.FlinkJobHistoryDO;
import cn.ac.iscas.dao.impl.FlinkJobHistoryDao;
import cn.ac.iscas.domain.job.JobStatus;
import cn.ac.iscas.domain.response.job.flink.SubmitRemoteFlinkJobResponse;
import cn.ac.iscas.service.flink.LogService;
import cn.ac.iscas.service.queue.SqlExecuteQueueService;
import com.iscas.datasong.lib.common.DataSongException;
import com.iscas.datasong.lib.common.Status;
import com.iscas.datasong.lib.util.DataSongJsonUtils;
import com.iscas.datasong.lib.util.OkHttpClientUtils;
//import org.apache.hadoop.conf.Configuration;
//import org.apache.hadoop.fs.FileSystem;
//import org.apache.hadoop.fs.Path;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.client.LaxRedirectStrategy;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.stream.Collectors;

/**
 * @Author: DataSong
 * @Description: Runs jobs on a Flink cluster (submit, cancel, query status) via REST and SSH
 * @Date: Create in 2020/3/3 22:49
 * @Modified By:
 */
@Service
public class FlinkJobExecutor {
    @Autowired
    private FlinkConfig flinkConfig;

    /** Pushes progress messages to the front end over STOMP ("/topic/log/{jobId}"). */
    @Autowired
    private SimpMessagingTemplate messagingTemplate;

    @Autowired
    private IJobMetadataDao jobMetadataDao;

    @Autowired
    private FlinkJobHistoryDao flinkJobHistoryDao;


    /**
     * Collects the queued SQL statements for {@code jobId}, serializes them into a single
     * shell-quoted program argument and submits the configured job jar to the remote
     * Flink cluster.
     *
     * @param jobId platform job id (NOT the Flink job id)
     * @return submit result including upload and run status
     */
    public SubmitRemoteFlinkJobResponse execute(String jobId) throws DataSongException, JSONException {
        SqlExecuteQueueService sqlExecuteQueueService = SpringUtil.getBean(SqlExecuteQueueService.class);
        ConcurrentLinkedQueue<String> sqls = sqlExecuteQueueService.getSqlsByJobId(jobId);

        // Jar path, entry class and savepoint path always come from configuration.
        // (The original null/empty guards tested freshly assigned empty strings and
        // were therefore always true.)
        String jarPath = flinkConfig.getJarLocalPath();
        String entryClass = flinkConfig.getJarEntryClass();
        String savePointPath = flinkConfig.getSavePointPath();
        Integer parallelism = 1;

        // Escape embedded quotes and collapse all whitespace/newlines so the JSON payload
        // survives as ONE quoted argument on the "flink run" command line.
        String programArgs = "\"" + DataSongJsonUtils.toJson(sqls).replace("\"", "\\\"")
                .replace("\r\n", " ")
                .replace("\n", " ")
                .trim()
                .replaceAll("\\s+", " ") + "\"";

        return submitJobV2(jarPath, entryClass, programArgs, savePointPath, parallelism, jobId);
    }


    /**
     * Uploads the local jar to the cluster via Flink's REST API, then submits it with
     * "flink run" over SSH. Kept for backward compatibility; {@link #submitJobV2} skips
     * the upload when the jar already exists on the cluster.
     *
     * @param jarPath       local path of the jar to upload
     * @param entryClass    main class passed to "flink run -c"
     * @param programArgs   pre-quoted program arguments
     * @param savePointPath currently unused
     * @param parallelism   value for "flink run -p"
     * @param jobId         platform job id (NOT the Flink job id)
     */
    public SubmitRemoteFlinkJobResponse submitJob(String jarPath, String entryClass, String programArgs,
                                                  String savePointPath, Integer parallelism, String jobId) throws JSONException {
        SubmitRemoteFlinkJobResponse response = new SubmitRemoteFlinkJobResponse();
        String uploadJarUrl = "http://" + flinkConfig.getFlinkJobmanagerAddr()
                + ":" + flinkConfig.getFlinkJobmanagerPort()
                + flinkConfig.getFlinkUploadJarUrl();
        //上传Jar包到flink集群
        messagingTemplate.convertAndSend("/topic/log/" + jobId, "开始上传Jar文件...");
        JSONObject uploadResponse = uploadJar(uploadJarUrl, jarPath);
        if (isUploadSuccess(uploadResponse)) {
            // Full path where the jar landed on the cluster, e.g. /tmp/flink-web-.../xxx.jar
            String uploadedJarFullpath = uploadResponse.getString("filename");
            String uploadedJarName = uploadedJarFullpath.substring(uploadedJarFullpath.lastIndexOf("/") + 1);
            fillUploadSuccess(response, uploadResponse.getString("status"), uploadedJarFullpath, "Jar upload success!");
            messagingTemplate.convertAndSend("/topic/log/" + jobId, "Jar文件上传成功...");

            String submitCommand = buildSubmitCommand(uploadedJarName, uploadedJarFullpath, entryClass, parallelism, programArgs);
            runAndFillResponse(submitCommand, jobId, response);
        } else {
            fillUploadFailure(response, uploadResponse);
        }
        return response;
    }

    /**
     * Submits the job, uploading the jar only when it is not already present on the
     * Flink cluster (checked over SSH).
     *
     * Bug fix versus the previous revision: a failed upload now aborts the submission
     * instead of falling through and running a jar that never reached the cluster.
     *
     * @param jarPath       local path of the jar to upload (only used when the remote jar is absent)
     * @param entryClass    main class passed to "flink run -c"
     * @param programArgs   pre-quoted program arguments
     * @param savePointPath currently unused
     * @param parallelism   value for "flink run -p"
     * @param jobId         platform job id (NOT the Flink job id)
     */
    public SubmitRemoteFlinkJobResponse submitJobV2(String jarPath, String entryClass, String programArgs,
                                                    String savePointPath, Integer parallelism, String jobId) throws JSONException {
        SubmitRemoteFlinkJobResponse response = new SubmitRemoteFlinkJobResponse();
        String remoteJarPath = flinkConfig.getFlinkExecJarRemotePath();

        //判断jar是否已存在flink服务器上，如果已存在则跳过上传
        if (isJarExistInFlinkCluster(remoteJarPath)) {
            fillUploadSuccess(response, "success", remoteJarPath, "Jar already exist in flink cluster!");
            messagingTemplate.convertAndSend("/topic/log/" + jobId, "Jar文件上传成功...");
        } else {
            String uploadJarUrl = "http://" + flinkConfig.getFlinkJobmanagerAddr()
                    + ":" + flinkConfig.getFlinkJobmanagerPort()
                    + flinkConfig.getFlinkUploadJarUrl();
            //上传Jar包到flink集群
            messagingTemplate.convertAndSend("/topic/log/" + jobId, "开始上传Jar文件...");
            JSONObject uploadResponse = uploadJar(uploadJarUrl, jarPath);
            if (!isUploadSuccess(uploadResponse)) {
                fillUploadFailure(response, uploadResponse);
                // Do NOT submit when the jar never made it to the cluster.
                return response;
            }
            //获取Jar包在Flink集群中存储的路径
            remoteJarPath = uploadResponse.getString("filename");
            fillUploadSuccess(response, uploadResponse.getString("status"), remoteJarPath, "Jar upload success!");
            messagingTemplate.convertAndSend("/topic/log/" + jobId, "Jar文件上传成功...");
        }

        String uploadedJarName = remoteJarPath.substring(remoteJarPath.lastIndexOf("/") + 1);
        String submitCommand = buildSubmitCommand(uploadedJarName, remoteJarPath, entryClass, parallelism, programArgs);
        runAndFillResponse(submitCommand, jobId, response);
        return response;
    }

    /** True when the upload REST call returned a body without "errors" and status == "success". */
    private boolean isUploadSuccess(JSONObject uploadResponse) {
        return uploadResponse != null
                && !uploadResponse.has("errors")
                && "success".equals(uploadResponse.optString("status"));
    }

    /** Fills the upload-related response fields for a jar stored at {@code jarFullPath}. */
    private void fillUploadSuccess(SubmitRemoteFlinkJobResponse response, String status, String jarFullPath, String info) {
        response.setUploadInfo(info);
        response.setUploadStatus(status);
        response.setUploadJarName(jarFullPath.substring(jarFullPath.lastIndexOf("/") + 1));
        response.setUploadJarPath(jarFullPath.substring(0, jarFullPath.lastIndexOf("/")));
    }

    /**
     * Marks the response as a failed upload. Unlike the original inline code this is
     * null-safe: a null body (upload request itself failed) no longer causes an NPE.
     */
    private void fillUploadFailure(SubmitRemoteFlinkJobResponse response, JSONObject uploadResponse) throws JSONException {
        response.setUploadStatus("error");
        response.setUploadInfo(uploadResponse == null
                ? "Jar upload failed: no response from Flink cluster"
                : uploadResponse.getString("errors"));
    }

    /**
     * Builds the "flink run" command line. Streaming jars (name contains "flink-stream")
     * are submitted in detached mode (-d) so the SSH call returns immediately.
     */
    private String buildSubmitCommand(String jarName, String jarFullPath, String entryClass,
                                      Integer parallelism, String programArgs) {
        String detachFlag = jarName.contains("flink-stream") ? "-d " : "";
        return String.format("%s/bin/flink run %s-p %s -c %s %s %s",
                flinkConfig.getFlinkJobmanagerHome(),
                detachFlag,
                parallelism,
                entryClass,
                jarFullPath,
                programArgs);
    }

    /**
     * Marks the job RUNNING, executes the submit command over SSH and copies the parsed
     * output (Flink job id, joined log lines, eval metrics) into the response.
     */
    private void runAndFillResponse(String submitCommand, String jobId, SubmitRemoteFlinkJobResponse response) {
        //更新任务表，将任务状态修改为[运行]
        jobMetadataDao.updateJobStatus(jobId, JobStatus.RUNNING.name());
        messagingTemplate.convertAndSend("/topic/log/" + jobId, "开始执行任务...");

        Map<String, Object> outputInfo = runJob(submitCommand, jobId);

        // Collapse the accumulated log lines into one string for the caller.
        Object logsObj = outputInfo.get(flinkConfig.getOutputInfoLog());
        if (logsObj instanceof List && !((List<?>) logsObj).isEmpty()) {
            List<String> logLines = new ArrayList<>();
            for (Object line : (List<?>) logsObj) {
                logLines.add(String.valueOf(line));
            }
            outputInfo.put(flinkConfig.getOutputInfoLog(), String.join(System.lineSeparator(), logLines));
        }

        Object flinkJobIdObj = outputInfo.get("flinkJobId");
        String flinkJobId = flinkJobIdObj == null ? null : flinkJobIdObj.toString();
        if (flinkJobId != null && !flinkJobId.isEmpty()) {
            response.setJobId(flinkJobId);
            response.setRunStatus("success");
            response.setRunInfo("start running");
            response.setStatus(Status.OK.getValue());
            response.setInfo("Submit job success");
        } else {
            response.setRunStatus("errors");
            response.setRunInfo("Execute submit command error!");
            response.setStatus(Status.CLIENT_ERROR.getValue());
            response.setInfo("Submit job fail!");
        }
        response.setOutputInfo(outputInfo);
    }

    /**
     * Uploads a jar to the Flink cluster through the REST endpoint (multipart form
     * field "jarfile") and returns the parsed JSON body, or null on failure.
     *
     * Fixes versus the original: the "Expect" header is now set on the REQUEST before
     * execution (it was previously set on the response afterwards, a no-op), and
     * try-with-resources replaces a finally block that NPE'd on {@code response.close()}
     * whenever {@code execute} itself threw.
     */
    public JSONObject uploadJar(String requestUrl, String jarPath) {
        HttpPost uploadRequest = new HttpPost(requestUrl);
        // Disable the "Expect: 100-continue" handshake for the multipart upload.
        uploadRequest.setHeader(new BasicHeader("Expect", ""));

        //必须是Multipart文件上传
        MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        builder.addBinaryBody("jarfile", new File(jarPath));
        uploadRequest.setEntity(builder.build());

        JSONObject jobUploadResponse = null;
        try (CloseableHttpClient httpClient = HttpClients.custom().setRedirectStrategy(new LaxRedirectStrategy()).build();
             CloseableHttpResponse response = httpClient.execute(uploadRequest)) {
            String bodyAsString = EntityUtils.toString(response.getEntity(), "UTF-8");
            jobUploadResponse = new JSONObject(bodyAsString);
        } catch (IOException | JSONException e) {
            e.printStackTrace();
        }
        return jobUploadResponse;
    }

    /**
     * @description: 调用远程ssh执行flink run指令，如果加上-d则是detach模式；若要使用JAVA_HOME等环境变量，需要在/etc/bashrc文件中设置
     * @param: command full "flink run" command line
     * @param: jobId 数据中台中的jobId，非flink的jobId
     * @return: output info map; contains "flinkJobId" on successful submission,
     *          plus eval metrics / log lines parsed from tagged output
     * @author LJian
     * @date: 2023/1/30 13:37
     */
    private Map<String, Object> runJob(String command, String jobId) {
        Map<String, Object> outputInfo = new HashMap<>(32);
        SshUtils sshUtils = new SshUtils(flinkConfig.getFlinkJobmanagerAddr(), flinkConfig.getFlinkJobmanagerSshUser(),
                flinkConfig.getFlinkJobmanagerSshPassword(), StandardCharsets.UTF_8);
        try {
            System.out.println("Prepare to run job!");
            Map<String, String> result = sshUtils.exec(command);
            System.out.println(command);
            System.out.println("Start to run job！");
            System.out.println("OUT==>  " + result.get("out"));
            System.out.println("ERR==>  " + result.get("err"));

            String out = result.getOrDefault("out", "");
            if (out.contains("Job has been submitted with JobID")) {
                // The Flink client prints "... submitted with JobID <id>"; the id is the last token.
                String[] tokens = out.split(" ");
                String flinkJobId = tokens[tokens.length - 1];
                outputInfo.put("flinkJobId", flinkJobId);
                LogService.runningJobIds.put(flinkJobId, jobId);
                recordJobHistory(jobId, flinkJobId);
            } else if (out.contains(flinkConfig.getOutputInfoTag())) {
                parseTaggedOutput(out, outputInfo);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return outputInfo;
    }

    /** Inserts a flink_job_history row for this run and stamps the job's execute time. */
    private void recordJobHistory(String jobId, String flinkJobId) {
        //如果jobId在flink_job_history已经存在，说明当前flink任务不是第一次执行
        int latestExecuteCount = 0;
        if (flinkJobHistoryDao.selectCountByJobId(jobId) > 0) {
            latestExecuteCount = flinkJobHistoryDao.selectMaxExecuteCoundByJobId(jobId);
        }

        Date now = Date.from(ZonedDateTime.now(ZoneId.of("Asia/Shanghai")).toInstant());
        FlinkJobHistoryDO flinkJobHistoryDO = new FlinkJobHistoryDO();
        flinkJobHistoryDO.setFlinkJobId(flinkJobId);
        flinkJobHistoryDO.setJobId(jobId);
        flinkJobHistoryDO.setExecuteTime(now);
        flinkJobHistoryDO.setExecuteCount(latestExecuteCount + 1);
        flinkJobHistoryDO.setStatus(JobStatus.RUNNING.name());
        flinkJobHistoryDO.setDuration(0L);
        flinkJobHistoryDao.insert(flinkJobHistoryDO);

        jobMetadataDao.updateJobExecuteTime(jobId, now);
    }

    /**
     * Parses one tagged output line ("&lt;tag&gt;key&lt;spliter&gt;value") emitted by the job,
     * e.g. eval!{Weighted Sensitivity=0.84..., Accuracy=0.84...} which is flattened into a map.
     */
    @SuppressWarnings("unchecked")
    private void parseTaggedOutput(String out, Map<String, Object> outputInfo) {
        out = out.replace(flinkConfig.getOutputInfoTag(), "");
        String[] tmp = out.split(flinkConfig.getOutputInfoSpliter());
        if (tmp.length != 2) {
            return;
        }
        if (tmp[0].equalsIgnoreCase(flinkConfig.getOutputInfoEval())) {
            // Value looks like "{k1=v1, k2=v2, ...}" -> flatten into key/value pairs.
            Map<String, Object> evalResult = new HashMap<>(16);
            String mapStr = tmp[1].replace("{", "").replace("}", "");
            for (String mapEle : mapStr.split(",")) {
                String[] keyVal = mapEle.split("=");
                if (keyVal.length == 2) {
                    evalResult.put(keyVal[0], keyVal[1]);
                }
            }
            outputInfo.put(flinkConfig.getOutputInfoEval(), evalResult);
        } else if (tmp[0].equalsIgnoreCase(flinkConfig.getOutputInfoLog())) {
            // Accumulate log lines; runAndFillResponse joins them later.
            List<String> logs = (List<String>) outputInfo.computeIfAbsent(
                    flinkConfig.getOutputInfoLog(), k -> new ArrayList<String>());
            logs.add(tmp[1]);
        } else {
            outputInfo.put(tmp[0], tmp[1]);
        }
    }

    /**
     * Cancels a running Flink job via the JobManager REST API
     * (PATCH /jobs/{id}?mode=cancel).
     *
     * @return true when the cancel request was sent; false on I/O failure
     */
    public boolean stop(String flinkJobId) {
        String stopJobUrl = "http://" + flinkConfig.getFlinkJobmanagerAddr()
                + ":" + flinkConfig.getFlinkJobmanagerPort()
                + "/jobs/" + flinkJobId + "?mode=cancel";
        //需要Patch方法
        HttpPatch cancelRequest = new HttpPatch(stopJobUrl);
        // try-with-resources also closes the response, which the original code leaked.
        try (CloseableHttpClient httpClient = HttpClients.custom().setRedirectStrategy(new LaxRedirectStrategy()).build();
             CloseableHttpResponse ignored = httpClient.execute(cancelRequest)) {
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Queries the Flink history server for the status of each given Flink job id.
     * Unknown jobs map to {@link JobStatus#UNSUBMITTED}; Flink's "FINISHED" state is
     * normalized to "SUCCEEDED" before being fed to {@code JobStatus.analyze}.
     */
    public Map<String, JobStatus> getJobStatus(List<String> ids) {
        OkHttpClientUtils httpClient = new OkHttpClientUtils();
        Map<String, JobStatus> result = new HashMap<>();
        String baseUrl = "http://" + flinkConfig.getFlinkJobmanagerAddr()
                + ":" + flinkConfig.getFlinkHistorymanagerPort()
                + "/jobs/";
        try {
            for (String id : ids) {
                String response = httpClient.doGet(baseUrl + id);
                Map<String, Object> responseMap = DataSongJsonUtils.fromJson(response, Map.class);
                if (responseMap.containsKey("errors")) {
                    result.put(id, JobStatus.UNSUBMITTED);
                } else {
                    String state = responseMap.get("state").toString();
                    String status = state.equalsIgnoreCase("FINISHED") ? "SUCCEEDED" : state;
                    result.put(id, JobStatus.analyze(status));
                }
            }
        } catch (IOException | DataSongException e) {
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Callback invoked when a Flink job terminates: persists the final status and
     * duration to both the job table and the flink_job_history table.
     *
     * NOTE(review): only "FINISHED" maps to SUCCEEDED; any other terminal state is
     * persisted with an empty status string — confirm whether FAILED/CANCELED should
     * be mapped as well.
     */
    public void flinkJobFinishedNotice(String jobId, String flinkJobId, String status, String duration) {
        Long durationTime = Long.parseLong(duration);
        String jobStatus = "";
        if (status.equalsIgnoreCase("FINISHED")) {
            jobStatus = JobStatus.SUCCEEDED.name();
        }

        jobMetadataDao.updateJobStatusDuration(jobId, jobStatus, durationTime);
        flinkJobHistoryDao.updateFlinkJobStatusDuration(flinkJobId, jobStatus, durationTime);
    }

    /**
     * Checks over SSH whether the job jar already exists on the Flink JobManager host,
     * so repeated submissions can skip the upload step.
     */
    private boolean isJarExistInFlinkCluster(String remoteJarPath) {
        SshUtils sshUtils = new SshUtils(flinkConfig.getFlinkJobmanagerAddr(), flinkConfig.getFlinkJobmanagerSshUser(),
                flinkConfig.getFlinkJobmanagerSshPassword(), StandardCharsets.UTF_8);
        String command = String.format("[ -f %s ] && echo true || echo false", remoteJarPath);
        try {
            Map<String, String> result = sshUtils.exec(command);
            // Default to "false" so a missing "out" entry no longer causes an NPE.
            return Boolean.parseBoolean(result.getOrDefault("out", "false").trim());
        } catch (IOException e) {
            throw new RuntimeException("Failed to check remote jar " + remoteJarPath, e);
        }
    }

}
