package cn.getech.data.development.service.impl;

import cn.getech.data.development.config.properties.BdpJobConfig;
import cn.getech.data.development.config.properties.DataDevelopmentConfig;
import cn.getech.data.development.constant.Constant;
import cn.getech.data.development.constant.RunStatus;
import cn.getech.data.development.constant.YesOrNoEnum;
import cn.getech.data.development.entity.RealTimeTask;
import cn.getech.data.development.entity.RealtimeQueueEntity;
import cn.getech.data.development.entity.RealtimeResourceEntity;
import cn.getech.data.development.mapper.RealTimeTaskMapper;
import cn.getech.data.development.mapper.RealtimeQueueMapper;
import cn.getech.data.development.model.vo.RealTimeTaskVO;
import cn.getech.data.development.service.FLinkService;
import cn.getech.data.development.service.RealTimeTaskService;
import cn.getech.data.development.service.RealtimeResourceService;
import cn.getech.data.development.utils.HdfsUtil;
import cn.getech.data.development.utils.HiveTableUtil;
import cn.getech.data.development.utils.shell.ExecuteShellUtil;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.DateUtils;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSchException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

@Slf4j
@Component
public class FLinkServiceImpl implements FLinkService {

    /** Watchdogs poll every 5s; give up after this many failed polls (~100s / "两分钟"). */
    private static final int MAX_POLL_FAILURES = 20;

    @Autowired
    private DataDevelopmentConfig config;
    @Autowired
    private BdpJobConfig bdpJobConfig;
    @Autowired
    private RealtimeResourceService realtimeResourceService;
    @Autowired
    private RealtimeQueueMapper realtimeQueueMapper;
    @Autowired
    private RealTimeTaskService realTimeTaskService;
    @Resource
    private RealTimeTaskMapper realTimeTaskMapper;

    /**
     * Watchdog pools of in-flight pause (savepoint) operations, keyed by menuId.
     * ConcurrentHashMap because it is mutated both by request threads and by the
     * scheduled watchdog threads that remove themselves on completion.
     */
    private static final Map<Integer, ScheduledExecutorService> poolMap = new ConcurrentHashMap<>();

    /**
     * Builds the shell script that submits the task via {@code flink run -m yarn-cluster}.
     * For plain FLINK tasks the user jar is first pulled from HDFS; for SQL/DAG/collection
     * tasks the bundled runner jar is used with a fixed main class and {@code sqlFile}
     * as the program argument.
     *
     * @param task    task definition (taskType, resources, memory/parallelism settings)
     * @param logFile remote file the job's stdout/stderr is redirected to
     * @param sqlFile remote config/SQL file passed as main args for SQL-style tasks
     * @return a multi-line shell command, backgrounded with nohup
     */
    private String initStartCmd(RealTimeTaskVO task, String logFile, String sqlFile) {
        StringBuilder command = new StringBuilder();
        String mainFunction = task.getMainFunction();
        String jarName = bdpJobConfig.getRealjarname();
        if (Constant.RealTimeTaskType.FLINK.getCode().equals(task.getTaskType())) {
            RealtimeResourceEntity resource = realtimeResourceService.getById(task.getResourceId());
            // Fail fast with a meaningful message instead of an NPE when the
            // referenced jar resource has been deleted.
            if (resource == null) {
                throw new RRException("flink任务资源不存在, resourceId:" + task.getResourceId());
            }
            if (StringUtils.isNotEmpty(resource.getResourceFilename())) {
                // "hadoop fs -get" drops the file under its basename, so take the
                // LAST path segment (the old split("/")[1] broke on nested paths
                // and threw on a slash-less filename).
                String[] parts = resource.getResourceFilename().split("/");
                jarName = parts[parts.length - 1];
            }
            String downJarCommand = "hadoop fs -get -f " + bdpJobConfig.getRealtime() + "/" +
                    resource.getResourceFilename() + " " + bdpJobConfig.getRealtaskhome();
            command.append(downJarCommand).append(" \n ");
        }
        if (Constant.RealTimeTaskType.FLINK_SQL.getCode().equals(task.getTaskType())) {
            mainFunction = "cn.getech.data.development.task.FlinkStreamSQLMain";
            task.setMainArgs(sqlFile);
        } else if (Constant.RealTimeTaskType.FLINK_SQL_DAG.getCode().equals(task.getTaskType())) {
            mainFunction = "cn.getech.data.development.task.FlinkStreamDAGSQLMain";
            task.setMainArgs(sqlFile);
        } else if (Constant.RealTimeTaskType.REAL_TIME_COLLECTION.getCode().equals(task.getTaskType())) {
            mainFunction = "cn.getech.data.development.task.FlinkStreamSyncHiveMain";
            task.setMainArgs(sqlFile);
        }
        command.append("source /etc/profile && nohup flink run -m yarn-cluster");
        command.append(" -c ").append(mainFunction);
        if (task.getParallelism() != null) {
            command.append(" -p ").append(task.getParallelism());
        }
        if (task.getJobManagerMemory() != null) {
            command.append(" -yjm ").append(task.getJobManagerMemory());
        }
        if (task.getTaskManagerMemory() != null) {
            command.append(" -ytm ").append(task.getTaskManagerMemory());
        }
        RealtimeQueueEntity queueEntity = realtimeQueueMapper.selectByProcId(task.getProcId());
        if (queueEntity != null) {
            command.append(" -yqu ").append(queueEntity.getQueueName());
        }
        if (task.getSlot() != null) {
            command.append(" -ys ").append(task.getSlot());
        }
        if (StringUtils.isNotEmpty(task.getSavepointPath())) {
            // Savepoint paths read back from logs may still contain newlines.
            String savepointPath = task.getSavepointPath().replaceAll("\n", "");
            command.append(" -s ").append(savepointPath);
        }
        command.append(" ").append(bdpJobConfig.getRealtaskhome()).append(jarName);
        if (StringUtils.isNotEmpty(task.getMainArgs())) {
            command.append(" ").append(task.getMainArgs());
        }
        command.append(">>").append(logFile)
                .append(" 2>&1 ").append(" & \n");
        return command.toString();
    }

    /**
     * Prepares the debug ("local") variant of a task's JSON config on the remote host:
     * rewrites the kafka topic with a per-task "local_topic_" prefix and sets isDebug=1,
     * then uploads it next to the original config.
     */
    @Override
    public void saveLocalConfig(RealTimeTaskVO task) {
        String pre = "task_" + task.getMenuId();
        String tempDir = bdpJobConfig.getRealtaskhome();
        String local = "local_topic_" + task.getMenuId();
        String prefix = Constant.RealTimeTaskType.getDescByCode(task.getTaskType());
        String uploadFile = tempDir + prefix + "/" + pre + ".json";
        ExecuteShellUtil shell = null;
        try {
            shell = ExecuteShellUtil.getInstance();
            shell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            String configContent = shell.getFileLog(uploadFile);
            // replace() not replaceAll(): the needle contains '.' which is a regex
            // meta-character and must be matched literally.
            configContent = configContent.replace("'connector.topic' = '",
                    "'connector.topic' = '" + local);
            JSONObject configJson = JSONObject.parseObject(configContent);
            configJson.put("isDebug", "1");
            shell.uploadFile(configJson.toJSONString(), tempDir + prefix + "/" + pre + local + ".json");
        } catch (Exception e) {
            log.error("flink替换文件参数失败：{}", uploadFile, e);
        } finally {
            // Close in finally so the SSH session does not leak on failure.
            if (shell != null) {
                shell.close();
            }
        }
    }

    /**
     * Runs a task in debug mode: uploads config + debug config + start script, launches it,
     * clears previous debug output from HDFS, then starts a watchdog that polls the log
     * every 5 seconds for the YARN application id / job id and records the task as RUNNING.
     *
     * @return map containing "logfile" — the remote log path callers can tail
     */
    @Override
    public Map<String, String> localRun(RealTimeTaskVO task) {
        saveConfig(task);
        saveLocalConfig(task);
        String pre = "task_" + task.getMenuId();
        Map<String, String> resultMap = new HashMap<>();
        String local = "local_topic_" + task.getMenuId();
        String tempDir = bdpJobConfig.getRealtaskhome();
        String prefix = Constant.RealTimeTaskType.getDescByCode(task.getTaskType());
        String logFile = tempDir + prefix + "/" + local + pre + DateUtils.format(new Date(),
                "yyyyMMdd_HH_mm_ss") + ".log";
        resultMap.put("logfile", logFile);
        ExecuteShellUtil shell = null;
        try {
            shell = ExecuteShellUtil.getInstance();
            shell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            String uploadFile = tempDir + prefix + "/" + pre + local + ".json";
            String startFile = tempDir + prefix + "/" + pre + local + "start.sh";
            String cmd = initStartCmd(task, logFile, uploadFile);
            shell.uploadFile(cmd, startFile);
            shell.uploadFile("调试运行：\n", logFile);
            shell.execCmd("cd " + tempDir + " && sh " + startFile);
        } catch (Exception e) {
            log.error("flink运行失败：{}", logFile, e);
        } finally {
            if (shell != null) {
                shell.close();
            }
        }
        // Best-effort removal of the previous debug output; a failure here must not
        // abort the run, but it should not be silently swallowed either.
        HdfsUtil hdfsUtil = null;
        String jobName = task.getId() + "_menuId_" + task.getMenuId();
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            hdfsUtil.delete(tempDir + "/debug/" + jobName + ".txt");
        } catch (Exception e) {
            log.warn("删除历史调试数据失败:{}", jobName, e);
        } finally {
            if (hdfsUtil != null) {
                hdfsUtil.close();
            }
        }
        ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
        AtomicInteger num = new AtomicInteger(0);
        pool.scheduleAtFixedRate(() -> {
            ExecuteShellUtil shellLog = null;
            try {
                shellLog = ExecuteShellUtil.getInstance();
                shellLog.init(config.getFLinkHost(), config.getFLinkPort(),
                        config.getFLinkUserName(), config.getFLinkPwd());
                String res = shellLog.getFile(logFile);
                String appId = parseRes(res, 1);
                if (StringUtils.isNotEmpty(appId)) {
                    appId = appId.replaceAll("\n", "");
                    RealTimeTask status = new RealTimeTask();
                    status.setMenuId(task.getMenuId());
                    status.setApplicationId(appId);
                    status.setRunState(RunStatus.RUNNING.getStatus());
                    realTimeTaskMapper.updateByMenuId(status);
                    String jobId = parseRes(res, 0);
                    if (StringUtils.isNotEmpty(jobId)) {
                        String[] nameNodes = bdpJobConfig.getYarnmasters().split(",");
                        // The cancel command is only appended to the log for the
                        // operator's convenience; it is not executed here.
                        StringBuilder command = new StringBuilder();
                        command.append("source /etc/profile \n flink cancel ")
                                .append(jobId).append(" -yid ").append(appId).append(" >> ")
                                .append(logFile)
                                .append(" 2>&1 ").append(" & \n");
                        String url = "http://" + nameNodes[0] + "/proxy/" + appId + "/#/overview";
                        shellLog.uploadFile("\n任务地址:" + url + "\n停止命令：\n" + command.toString(),
                                logFile, ChannelSftp.APPEND);
                        pool.shutdown();
                        return; // done — do not count this tick as a failure
                    }
                }
            } catch (Exception e) {
                log.warn("读取数据异常:{}", e.getMessage());
            } finally {
                if (shellLog != null) {
                    shellLog.close();
                }
            }
            // Exactly one increment per tick (the original incremented twice on a
            // failed tick, halving the intended ~2-minute timeout).
            if (num.incrementAndGet() > MAX_POLL_FAILURES) {
                log.warn("获取调试结果失败:{}", logFile);
                pool.shutdown();
            }
        }, 2, 5, TimeUnit.SECONDS); // poll every five seconds
        return resultMap;
    }

    /**
     * Reads the debug output file for the task whose job_id matches {@code appId}.
     *
     * @return the file content, or null when the task or the file is missing
     */
    @Override
    public String getDebugData(String appId) {
        HdfsUtil hdfsUtil = null;
        String tempDir = bdpJobConfig.getRealtaskhome();
        RealTimeTask task = realTimeTaskMapper.selectOne(new QueryWrapper<RealTimeTask>()
                .eq("job_id", appId).last("limit 1"));
        // Guard against an unknown appId instead of NPE-ing on task.getId().
        if (task == null) {
            log.warn("未找到调试任务, appId:{}", appId);
            return null;
        }
        String jobName = task.getId() + "_menuId_" + task.getMenuId();
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            return hdfsUtil.readFileData(tempDir + "/debug/" + jobName + ".txt");
        } catch (Exception e) {
            log.error("读取调试数据失败:{}", jobName, e);
        } finally {
            if (hdfsUtil != null) {
                hdfsUtil.close();
            }
        }
        return null;
    }

    /**
     * Uploads the task's config file (SQL for FLINK_SQL/DAG tasks, collection config for
     * REAL_TIME_COLLECTION tasks) and a start script to the remote host. Nothing is executed.
     */
    @Override
    public void saveConfig(RealTimeTaskVO task) {
        String pre = "task_" + task.getMenuId();
        String tempDir = bdpJobConfig.getRealtaskhome();
        String prefix = Constant.RealTimeTaskType.getDescByCode(task.getTaskType());
        String logFile = tempDir + prefix + "/flink" + pre + "tem.log";
        ExecuteShellUtil shell = null;
        try {
            shell = ExecuteShellUtil.getInstance();
            shell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            String uploadFile = tempDir + prefix + "/" + pre + ".json";
            if (Constant.RealTimeTaskType.FLINK_SQL.getCode().equals(task.getTaskType()) ||
                    Constant.RealTimeTaskType.FLINK_SQL_DAG.getCode().equals(task.getTaskType())
            ) {
                String sql = realTimeTaskService.wrapFlinkSql(task.getMenuId(), YesOrNoEnum.YES.getValue());
                shell.uploadFile(sql, uploadFile);
            } else if (Constant.RealTimeTaskType.REAL_TIME_COLLECTION.getCode().equals(task.getTaskType())) {
                // Renamed from "config": the old local variable shadowed the
                // injected DataDevelopmentConfig field.
                String collectionConfig = realTimeTaskService.wrapCollectionTask(task.getMenuId());
                shell.uploadFile(collectionConfig, uploadFile);
            }
            String startFile = tempDir + prefix + "/" + pre + "start.sh";
            String cmd = initStartCmd(task, logFile, uploadFile);
            shell.uploadFile(cmd, startFile);
        } catch (Exception e) {
            log.error("flink运行失败：{}", logFile, e);
        } finally {
            if (shell != null) {
                shell.close();
            }
        }
    }

    /**
     * Runs a task for real: uploads the start script and executes it on the remote host.
     *
     * @return map containing "logfile" — the remote log path callers can tail
     */
    @Override
    public Map<String, String> run(RealTimeTaskVO task) {
        String pre = "task_" + task.getMenuId();
        Map<String, String> resultMap = new HashMap<>();
        String tempDir = bdpJobConfig.getRealtaskhome();
        String prefix = Constant.RealTimeTaskType.getDescByCode(task.getTaskType());
        String logFile = tempDir + prefix + "/flink" + pre + DateUtils.format(new Date(),
                "yyyyMMdd_HH_mm_ss") + ".log";
        resultMap.put("logfile", logFile);
        ExecuteShellUtil shell = null;
        try {
            shell = ExecuteShellUtil.getInstance();
            shell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            String uploadFile = tempDir + prefix + "/" + pre + ".json";
            String startFile = tempDir + prefix + "/" + pre + "start.sh";
            String cmd = initStartCmd(task, logFile, uploadFile);
            shell.uploadFile("真实运行：\n", logFile);
            shell.uploadFile(cmd, startFile);
            shell.execCmd("cd " + tempDir + " && sh " + startFile);
            // Brief pause so the submission has a chance to write to the log
            // before callers start polling it.
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status
            log.error("flink运行失败：{}", logFile, e);
        } catch (Exception e) {
            log.error("flink运行失败：{}", logFile, e);
        } finally {
            if (shell != null) {
                shell.close();
            }
        }
        return resultMap;
    }

    /**
     * Extracts an identifier from raw flink client log output.
     *
     * @param res  full log text
     * @param flag 2 = savepoint path, 1 = YARN application id, anything else = flink job id
     * @return the extracted value, or null when the marker is absent or the log is
     *         still truncated (previously this threw StringIndexOutOfBoundsException)
     */
    private String parseRes(String res, int flag) {
        if (flag == 2) { // savepoint path
            int index = res.indexOf("Savepoint completed. Path:");
            if (index < 0) {
                return null;
            }
            // 27 = marker length + separating space
            String temp = res.substring(index + 27);
            int end = temp.indexOf("You can resume");
            if (end < 0) {
                return null; // trailing marker not flushed yet
            }
            return temp.substring(0, end).replaceAll("\n", "");
        } else if (flag == 1) { // YARN application id
            int index = res.indexOf("Submitting application master");
            if (index < 0) {
                return null;
            }
            // 30 = marker length + separating space
            String temp = res.substring(index + 30);
            int end = temp.indexOf("INFO  org");
            // 24 = timestamp prefix of the following log line; guard truncated logs
            if (end < 24) {
                return null;
            }
            return temp.substring(0, end - 24).replaceAll("\n", "");
        } else { // flink job id
            int index = res.indexOf("submitted with JobID");
            if (index < 0) {
                return null;
            }
            int startJob = index + 21;  // marker length + separating space
            int endJob = startJob + 32; // a flink JobID is 32 hex chars
            if (endJob > res.length()) {
                return null;
            }
            return res.substring(startJob, endJob);
        }
    }

    /**
     * Parses the YARN application id out of a remote log file.
     *
     * @return the application id, or null when the file is unreadable or not yet written
     */
    @Override
    public String getAppId(String logfile) {
        ExecuteShellUtil resShell = null;
        try {
            resShell = ExecuteShellUtil.getInstance();
            resShell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            String res = resShell.getFile(logfile);
            String appId = parseRes(res, 1);
            if (appId != null) {
                return appId.replaceAll("\n", "");
            }
        } catch (JSchException e) {
            log.warn("日志文件获取失败：logfile{}", logfile, e);
        } finally {
            if (resShell != null) {
                resShell.close();
            }
        }
        return null;
    }

    /**
     * Pauses a running task: triggers "flink savepoint", then polls the savepoint log
     * every 5 seconds; once the savepoint path appears, cancels the job with that
     * savepoint and records the task as KILLED together with the savepoint path.
     * A second pause request for the same menuId while one is in flight is rejected.
     *
     * @return a user-facing message when the request is rejected, otherwise null
     */
    @Override
    public String pause(RealTimeTaskVO task) {
        ExecuteShellUtil resShell = null;
        try {
            Integer menuId = task.getMenuId();
            if (poolMap.containsKey(menuId)) {
                return "任务正在暂停，稍后尝试";
            }
            // flink savepoint <jobId> hdfs:///user/flink/savepoint_<menuId> -yid <appId>
            String now = menuId + "_" + DateUtils.format(new Date(),
                    "yyyyMMdd_HH_mm_ss");
            String tempDir = bdpJobConfig.getRealtaskhome();
            String prefix = Constant.RealTimeTaskType.getDescByCode(task.getTaskType());
            String savepointLogFile = tempDir + prefix + "/savepoint_" + now + ".log";
            String savepointFile = tempDir + prefix + "/savepoint_" + now + ".sh";
            resShell = ExecuteShellUtil.getInstance();
            resShell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            // task.getJobId() holds the submission log path; the flink job id is parsed from it.
            String res = resShell.getFile(task.getJobId());
            String jobId = parseRes(res, 0);
            String savepointPath = "hdfs:///user/flink/savepoint_" + menuId;
            StringBuilder savepoint = new StringBuilder(" source /etc/profile &&  nohup  flink savepoint ");
            savepoint.append(jobId).append(" ").append(savepointPath)
                    .append(" -yid ").append(task.getApplicationId()).append("  >")
                    .append(savepointLogFile)
                    .append(" 2>&1 ").append(" & \n");
            resShell.uploadFile(savepoint.toString(), savepointFile);
            resShell.execCmd("sh " + savepointFile);
            ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
            poolMap.put(menuId, pool);
            AtomicInteger num = new AtomicInteger(0);
            pool.scheduleAtFixedRate(() -> {
                ExecuteShellUtil cancelShell = null;
                try {
                    cancelShell = ExecuteShellUtil.getInstance();
                    cancelShell.init(config.getFLinkHost(), config.getFLinkPort(),
                            config.getFLinkUserName(), config.getFLinkPwd());
                    String savepointPathStr = cancelShell.getFile(savepointLogFile);
                    String savepointPathNew = parseRes(savepointPathStr, 2);
                    if (StringUtils.isNotEmpty(savepointPathNew)) {
                        // flink cancel -s <savepointPath> <jobId> -yid <applicationId>
                        String cancelLogFile = tempDir + prefix + "/cancel_" + now + ".log";
                        String cancelFile = tempDir + prefix + "/cancel_" + now + ".sh";
                        StringBuilder cancel = new StringBuilder();
                        cancel.append(" source /etc/profile \n  nohup  flink cancel ").append(" -s ")
                                .append(savepointPathNew).append(" ").append(jobId).append(" -yid ")
                                .append(task.getApplicationId())
                                .append(" >").append(cancelLogFile)
                                .append(" 2>&1 ").append(" & \n");
                        cancelShell.uploadFile(cancel.toString(), cancelFile);
                        cancelShell.execCmd(" sh " + cancelFile);
                        RealTimeTask param = new RealTimeTask();
                        param.setMenuId(task.getMenuId());
                        param.setSavepointPath(savepointPathNew);
                        param.setRunState(RunStatus.KILLED.getStatus());
                        realTimeTaskMapper.updateByMenuId(param);
                        pool.shutdown();
                        poolMap.remove(menuId);
                        return;
                    }
                } catch (Exception e) {
                    log.error("任务暂停失败{}", savepointLogFile, e);
                } finally {
                    if (cancelShell != null) {
                        cancelShell.close();
                    }
                }
                // Count exception ticks too — the original only counted the
                // "not ready yet" branch, so a persistent failure leaked the pool.
                if (num.incrementAndGet() > MAX_POLL_FAILURES) {
                    pool.shutdown();
                    poolMap.remove(menuId);
                    log.error("任务暂停失败,日志文件:{}", savepointLogFile);
                }
            }, 2, 5, TimeUnit.SECONDS); // poll every five seconds
        } catch (Exception e) {
            log.warn("任务暂停失败： jobName:{}", task.getTaskName(), e);
        } finally {
            if (resShell != null) {
                resShell.close();
            }
        }
        return null;
    }

    /**
     * Cancels a task without a savepoint: runs "flink cancel" plus "yarn application -kill"
     * remotely, then additionally hits every YARN proxy's yarn-cancel REST endpoint as a
     * best-effort fallback.
     */
    @Override
    public void cancel(RealTimeTaskVO task) {
        ExecuteShellUtil resShell = null;
        try {
            resShell = ExecuteShellUtil.getInstance();
            resShell.init(config.getFLinkHost(), config.getFLinkPort(),
                    config.getFLinkUserName(), config.getFLinkPwd());
            // task.getJobId() holds the submission log path; ids are parsed from it.
            String res = resShell.getFile(task.getJobId());
            String tempDir = bdpJobConfig.getRealtaskhome();
            String now = task.getMenuId() + "_" + DateUtils.format(new Date(),
                    "yyyyMMdd_HH_mm_ss");
            String prefix = Constant.RealTimeTaskType.getDescByCode(task.getTaskType());
            String stop_task_CmdFile = tempDir + prefix + "/stop_task_" + now + ".sh";
            String stop_task_LogFile = tempDir + prefix + "/stop_task_" + now + ".log";
            if (StringUtils.isEmpty(task.getApplicationId())) {
                task.setApplicationId(parseRes(res, 1));
            }
            // Guard: parseRes may find nothing — previously this NPE'd on contains().
            if (StringUtils.isEmpty(task.getApplicationId())) {
                log.warn("任务终止失败,未解析到applicationId, menuId:{}", task.getMenuId());
                return;
            }
            if (task.getApplicationId().contains("\n")) {
                task.setApplicationId(task.getApplicationId().replaceAll("\n", ""));
            }
            StringBuilder command = new StringBuilder();
            String jobId = parseRes(res, 0);
            command.append("source /etc/profile \nflink cancel ")
                    .append(jobId).append(" >> ")
                    .append(stop_task_LogFile)
                    .append(" 2>&1 ").append(" & \n");
            command.append("yarn application -kill  ")
                    .append(task.getApplicationId()).append(" >> ")
                    .append(stop_task_LogFile)
                    .append(" 2>&1 ").append(" & \n ");

            resShell.uploadFile(command.toString(), stop_task_CmdFile);
            resShell.execCmd("sh " + stop_task_CmdFile);
            String[] nameNodes = bdpJobConfig.getYarnmasters().split(",");
            for (String nameNode : nameNodes) {
                // Best-effort REST kill via each YARN proxy, e.g.
                // http://host:8088/proxy/<appId>/jobs/<jobId>/yarn-cancel
                String url = "http://" + nameNode + "/proxy/" + task.getApplicationId() + "/" + jobId + "/yarn-cancel";
                try {
                    HttpUtil.get(url, 5000);
                    log.info("任务终止：appId{}\n jobId:{} \n url {}", task.getApplicationId(), jobId, url);
                } catch (Exception e) {
                    log.warn("任务终止失败：任务menuId:{} ,appId :{}  jobId:{} \n url {}", task.getMenuId(), task.getApplicationId(), jobId, url);
                }
            }
        } catch (Exception e) {
            log.warn("任务终止失败： jobName:{},任务menuId:{}", task.getTaskName(), task.getMenuId(), e);
        } finally {
            if (resShell != null) {
                resShell.close();
            }
        }
    }
}
