package cn.hloger.spider.demo.dataxDemo;


import cn.hloger.spider.demo.dataxDemo.bean.CreateJobVO;
import cn.hloger.spider.demo.dataxDemo.bean.DataxEsLog;
import cn.hloger.spider.demo.dataxDemo.bean.KafkaZrwsl;
import cn.hloger.spider.demo.dataxDemo.bean.Zrwsl;
import cn.hloger.spider.demo.dataxDemo.bean.ZrwslVO;
import cn.hloger.spider.demo.dataxDemo.utils.CommandLineUtil;
import cn.hloger.spider.demo.dataxDemo.verify.Constants;
import cn.hloger.spider.demo.dataxDemo.verify.ValidateUtils;
import cn.hutool.core.collection.CollectionUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FilenameUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * 中电的datax调用方式
 *
 * @author pengzhuang
 * @date 2020/9/10 13:41
 */
@Slf4j
@Service
public class ZxService {
    /**
     * datax启动脚本路径
     */
    private static final String DATAX_SCRIPT = "/datax/bin/datax.py";
    /**
     * chunjun启动脚本路径
     */
    private static final String CHUNJUN_SCRIPT = "/chunjun/bin/chunjun-local.sh";
    /**
     * datax配置json路径
     */
    private static final String DATAX_PZJSON = "/datax/pzjson/";
    /**
     * datax保存本地临时文件的路径
     */
    private static final String DATAX_LOCALLOG = "/datax/locallog/";
    /**
     * chunjun配置json路径
     */
    private static final String CHUNJUN_PZJSON = "/chunjun/pzjson/";
    /**
     * dataxhome
     */
    private static final String DATAXHOME = "/home";
    /**
     * datax日志存放到kafka，topic名称，离线实时日志使用
     */
    @Value("${kafka.topic:t_doffline_dataxlog_offline_topic}")
    private String topic;
    /**
     * datax任务状态流转发消息到kafka，topic名称
     */
    @Value("${kafka.dsytopic:t_dsynchronous_dataxlog_dsynchronous_topic}")
    private String dsytopic;

    /**
     * datax任务信息发消息到kafka，topic名称
     */
    @Value("${kafka.rwinfotopic:t_dsynchronous_dataxlog_rwinfo_topic}")
    private String rwinfotopic;
    /**
     * datax传输文件主题，topic名称，用于文件集成清册统计
     */
    @Value("${kafka.fileTransporttInfTopic:t_dintegration_dataxfile_topic}")
    private String fileTransporttInfTopic;
    /**
     * datax日志存放到es，es索引名，默认dmp.dsynchronous.dataxlog
     */
    @Value("${log.indexName:dmp.dsynchronous.dataxlog}")
    private String logIndexName;
    /**
     * datax日志存放到es开关，open开，close关，默认开
     */
    @Value("${log.toEs:open}")
    private String logToEs;
    /**
     * 脏数据索引名称
     */
    @Value("${dirtyDataIndex:dmp.dsynchronous.dirtydataindex}")
    private String dirtyDataIndex;
    /**
     * 结果预览存放到es，es索引名，默认dmp.dsynchronous.rwjg
     */
    @Value("${rwjg.indexName:dmp.dsynchronous.rwjg}")
    private String rwjgIndexName;
    /**
     * 配置哪些平台的日志需要写入到kafka （集成01  标签02  离线03  共享04 脱敏05）用逗号分隔
     */
    @Value("${log.toKafka:03}")
    private String logToKafa;
    /**
     * datax插件最小堆内存（M）
     */
    @Value("${datax.xms:2048}")
    private String dataxXms;
    /**
     * datax插件最大堆内存（M）
     */
    @Value("${datax.xmx:2048}")
    private String dataxXmx;
    /**
     * datax logback配置文件名称，simplelogback屏蔽了第三方jar包的日志，去掉了线程名
     */
    @Value("${datax.logback:simplelogback}")
    private String dataxLogbackFile;
    /**
     * datax日志先保存到本地再上传到es开关
     */
    @Value("${datax.log.saveLocal:true}")
    private boolean saveLocalSwitch;
    /**
     *
     */
//    @Autowired
//    private EsRestApiService esRestApiService;
    /**
     *
     */
//    @Autowired
//    private //kafkaService

    /**
     * 写入到kafka的索引截取大小 20，文件传输集成清册从日志中截取需要的信息
     */
    public static final int NUMBER_OF_KAFKA_INDEX_20 = 20;
    /**
     * 数率speedcount计算取值 1000，计算传输速率的常量值
     */
    public static final int NUMBER_OF_SPEED_COUNT_SUB = 1000;
    /**
     * 数率speedcount计算截取值 4
     */
    public static final int NUMBER_OF_SPEED_COUNT_SUBSPLIT_4 = 4;

    /**
     * 执行DATAX 脚本
     */
    public Integer rwzx(CreateJobVO createJobVO) {
        String id = createJobVO.getZrwslId();
        String zrwpch = createJobVO.getZrwpch();
        String zrwId = createJobVO.getZrwId();
        String jobId = createJobVO.getJobId();
        // 为了兼容历史数据,如果老任务没有jobId,则直接取子任务主键Id
        jobId = StringUtils.isEmpty(jobId) ? zrwId : jobId;

        String rwId = createJobVO.getRwId();

        // 校验参数格式
        if (!ValidateUtils.verifyZrwId(jobId)) {
            throw new IllegalArgumentException("子任务id格式不正确");
        }
        if (!ValidateUtils.verifyPch(zrwpch)) {
            throw new IllegalArgumentException("子任务批次号格式不正确");
        }


        //初始化运行中
        int status = -1;
        log.info("任务id:{}子任务id:{}批次号:{}", rwId, zrwId, zrwpch);
        //根据节点选取方法获取可用节点
        //初始化一个子任务日志
        Zrwsl zrwsl = new Zrwsl();
        zrwsl.setId(id);
        zrwsl.setZrwpch(zrwpch);
        zrwsl.setZrwId(zrwId);
        zrwsl.setJobId(jobId);
        zrwsl.setRwId(rwId);
        zrwsl.setRwmc(createJobVO.getRwmc());
        zrwsl.setEngineMode(createJobVO.getEngineMode());

        // 判断存放json脚本文件的目录是否存在
        File tempFile = new File(DATAXHOME + DATAX_PZJSON);
        log.info("生成脚本文件，路径：{}", DATAXHOME + DATAX_PZJSON);
        if (!tempFile.exists()) {
            // 创建本地存放datax脚本的目录 /home/datax/pzjson/
            tempFile.mkdirs();
        }

        // 准备将参数中的json字符串保存为本地的json文件
        File jsonFile = new File(DATAXHOME + DATAX_PZJSON, FilenameUtils.getName(jobId + zrwpch));
        String jsonFileName = DATAXHOME + DATAX_PZJSON + jobId + zrwpch;
        log.info("jsonFileName：{}", jsonFileName);
        try (FileWriter fw = new FileWriter(jsonFile)) {
            log.info("生成jsonFileName完成：{}", jsonFileName);
            fw.write(createJobVO.getRwJson());

            // 组装启动命令
            String[] arguments = new String[]{"python3", DATAXHOME + DATAX_SCRIPT, "--jobid", jobId, "--pch",
                    zrwpch, "--logname", jobId + "_" + zrwpch, "--jvm", "-Xms" + dataxXms + "m -Xmx" + dataxXmx + "m",
                    "--logback", dataxLogbackFile, jsonFileName};

            //根据批次号在启动命令中的标注获取远程执行pid  ps aux |grep ....| grep ....|....
            String[] sshPid = new String[]{"/bin/sh /usr/bin/ps aux | grep " + zrwpch +
                    " |grep -v grep|awk \'{print $2}\'"};

            //将任务加入线程池
            log.info("线程启动:{}|{}", jobId, zrwpch);
            log.info("启动脚本为:{}", String.join(" ", arguments));
//            ThreadPoolManager.getInstance().addExecuteTask(new TbThread(jobId, zrwId, arguments, zrwsl,
//                    this, sshPid, null, createJobVO.getRwly()));
        } catch (Exception e) {
            log.error("执行DATAX 脚本异常", e);
        }
        return status;
    }

    /**
     * chunjun local模式执行任务
     *
     * @param createJobVO
     * @return
     */
    public Integer chunjunRwzx(CreateJobVO createJobVO) {
        String id = createJobVO.getZrwslId();
        String zrwpch = createJobVO.getZrwpch();
        String zrwId = createJobVO.getZrwId();
        String jobId = createJobVO.getJobId();
        if (StringUtils.isEmpty(jobId)) {
            // 为了兼容历史数据,如果老任务没有jobId,则直接取子任务主键Id
            jobId = zrwId;
        }
        String rwId = createJobVO.getRwId();

        //初始化运行中
        int status = -1;
        log.info("任务id:{}子任务id:{}批次号:{}", rwId, zrwId, zrwpch);
        //根据节点选取方法获取可用节点
        //初始化一个子任务日志
        Zrwsl zrwsl = new Zrwsl();
        zrwsl.setId(id);
        zrwsl.setZrwpch(zrwpch);
        zrwsl.setZrwId(zrwId);
        zrwsl.setJobId(jobId);
        zrwsl.setRwId(rwId);
        zrwsl.setRwmc(createJobVO.getRwmc());
        zrwsl.setEngineMode(createJobVO.getEngineMode());
        //将json临时保存到chunjun节点，避免回调同步获取json
        File tempFile = new File(DATAXHOME + CHUNJUN_PZJSON);
        if (!tempFile.exists()) {
            tempFile.mkdirs();
            log.info("创建目录成功：{}", DATAXHOME + CHUNJUN_PZJSON);
        }
        String jsonFileName = DATAXHOME + CHUNJUN_PZJSON + jobId + zrwpch;
        File jsonFile = new File(DATAXHOME + CHUNJUN_PZJSON, FilenameUtils.getName(jobId + zrwpch));
        log.info("jsonFileName：{}", jsonFileName);

        try (FileWriter fw = new FileWriter(jsonFile)) {
            fw.write(createJobVO.getRwJson());
            // 校验参数格式
            if (!ValidateUtils.verifyZrwId(jobId)) {
                throw new IllegalArgumentException("子任务id格式不正确");
            }
            if (!ValidateUtils.verifyPch(zrwpch)) {
                throw new IllegalArgumentException("子任务批次号格式不正确");
            }

            //服务器路径
            String[] arguments = new String[]{"/bin/sh", "/home/chunjun/bin/chunjun-local.sh", "-flinkConfDir", "/home/chunjun/conf", "-job", jsonFileName};
            log.info("arguments：{}", arguments);
            //根据批次号在启动命令中的标注获取远程执行pid  ps aux |grep ....| grep ....|....
            String[] sshPid = new String[]{"/bin/sh /usr/bin/ps aux | grep " + zrwpch +
                    " |grep -v grep|awk \'{print $2}\'"};

            //将任务加入线程池
            log.info("线程启动:{}|{}", jobId, zrwpch);
            String join = String.join(" ", arguments);
            log.info(join);
//            ThreadPoolManager.getInstance().addExecuteTask(new TbThread(jobId, zrwId, arguments, zrwsl,
//                    this, sshPid, null, createJobVO.getRwly()));
        } catch (Exception e) {
            log.error("执行DATAX 脚本异常", e);
            //将异常信息保存到es
//            try {
////                ElasticsearchExtendUtil.createData(e.toString(), logIndexName, Constants.ES_DOC);
//            } catch (IOException ex) {
//                log.error("将异常信息保存到es失败", ex);
//            }
            KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
            kafkaZrwsl.setZrwsl(zrwsl);
            zrwsl.setZxjg(1);
            kafkaZrwsl.setMsgType("MsgTypeEnum.TYPE_2.getCode()");
//            kafkaZrwsl.setMsgType(MsgTypeEnum.TYPE_2.getCode());
            log.info("任务执行异常，将任务状态置为失败,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
            //kafkaService
        }
        return status;

    }

    /**
     * 线程
     */
    public class TbThread implements Runnable {

        private String zrwId;
        private String jobId;
        private String[] arguments;
        private Zrwsl zrwsl;
        private ZrwslVO zrwslVo;
        private ZxService zxService;
        private String[] sshPid;
        private String ip;
        private String rwly;

        public TbThread(String jobId, String zrwId, String[] arguments, Zrwsl zrwsl,
                        ZxService zxService,
                        String[] sshPid, String ip, String rwly) {
            // 长任务Id
            this.zrwId = zrwId;
            // Long 类型任务Id
            this.jobId = jobId;
            this.arguments = arguments;
            this.zrwsl = zrwsl;
            this.zxService = zxService;
            this.sshPid = sshPid;
            this.ip = ip;
            this.rwly = rwly;
            this.zrwslVo = new ZrwslVO();
            this.zrwslVo.setZrwsl(zrwsl);
        }

        @Override
        public void run() {
            //大于0的为失败 初始化一个失败的值
            int status = 1;
            try {
                log.info("开始执行启动command：{}", arguments);
                Process process = CommandLineUtil.exec(arguments);

                //更新为执行中
                updateZrwRunning();

                InputStream processInputStream = process.getInputStream();
                String streamJobId = jobId + zrwsl.getZrwpch();
                String zrwslPch = zrwsl.getZrwpch();
                StreamGobbler outputGobbler = new StreamGobbler(processInputStream, "Output", streamJobId,
                        "error_" + zrwId + "_" + zrwslPch, zrwsl, rwly);

                outputGobbler.start();
                outputGobbler.join();

                log.info("{} | {}---等待调用结果...........", jobId, zrwsl.getZrwpch());
                // 等待子线程执行完毕
                status = process.waitFor();
                log.info("{} | {}---命令行调用结果状态: {}", jobId, zrwsl.getZrwpch(), status);
                status = outputGobbler.zxjg;
                log.info("{} | {}---日志返回任务状态: {}", jobId, zrwsl.getZrwpch(), status);

//                // 发送kafka日志的结束标识
//                if (rwly != null && logToKafa.contains(rwly) && "open".equals(logToEs)) {
//                    sendTbrwJgToKafka(status);
//                }

                // 任务运行结束,删除本地的json脚本文件
                List<String> needDeleteFile = outputGobbler.getNeedDeleteFile();
                zrwslVo.setNeedDeleteFile(needDeleteFile);
                // 回调同步引擎，更改任务状态
                callbackRwStatus(status);

                // 回调同步provider，发送指标
//                bsjToEs(zrwsl.getId(), zrwsl.getZrwpch(), outputGobbler, status);

                //更新执行结果
                if (outputGobbler.totalFailCount > 0 && CollectionUtil.isNotEmpty(outputGobbler.needRecordList)) {
                    callbackErrorMessage(outputGobbler);
                }

                //任务执行结束删除任务json  /home/datax/pzjson/2022042215071376326xe74n8a2
                log.info("删除临时文件 {}", arguments[(arguments.length - 1)]);
                // 获取本地任务json脚本文件名
                String filename = arguments[(arguments.length - 1)].split("/")
                        [arguments[(arguments.length - 1)].split("/").length - 1];
                log.info("删除临时文件 {}", filename);
                // 进行文件删除操作
                deleteTempRwJsonFile(filename);
                deleteChunjunTempRwJsonFile(filename);
                //最后将缓存的本地log日志上传到es
                if (saveLocalSwitch) {
//                    log.error("[run] zrwId:{} pch:{} 准备进行本地日志上传ES  ", zrwsl.getZrwId(), zrwsl.getZrwpch());
//                    SpringApplicationContextUtil.getBean(EsServiceImpl.class).uploadLocalLogToEs
//                            (DATAXHOME + DATAX_LOCALLOG, zrwsl.getRwId(), zrwsl.getZrwId(), zrwsl.getZrwpch());
                }
            } catch (Exception e) {
                try {
                    DataxEsLog dataxEsLog = new DataxEsLog();
                    dataxEsLog.setRwId(zrwsl.getRwId());
                    dataxEsLog.setJobId(zrwsl.getZrwId());
                    dataxEsLog.setRwpch(zrwsl.getZrwpch());
                    dataxEsLog.setJsbs("0");
                    dataxEsLog.setCjsj(System.nanoTime());
                    dataxEsLog.setLog(e.toString());
                    String jsonString = JSON.toJSONString(dataxEsLog);
//                    ElasticsearchExtendUtil.createData(jsonString, logIndexName, Constants.ES_DOC);
                } catch (Exception ex) {
                    ex.printStackTrace();
                    log.error("[run] zrwId:{} pch:{} 将异常信息保存到es失败  ", zrwsl.getZrwId(), zrwsl.getZrwpch(), ex);
                }
                Thread currentThread = Thread.currentThread();
                currentThread.interrupt();
            }
        }

        /**
         * 更新子任务为运行中状态
         */
        private void updateZrwRunning() {
            zrwsl.setZxjg(-1);
            try {
                // 将子任务实例状态由待运行 -2 改为运行中 -1  updateZrwsl
                // 改为向kafka发送消息
                KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
                kafkaZrwsl.setZrwsl(zrwsl);
                kafkaZrwsl.setMsgType("MsgTypeEnum.TYPE_1.getCode()");
                log.info("任务变为运行中,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
                //kafkaService
            } catch (Exception e) {
                log.error("[exception] id: {} jobId:{}，update zrwsl status -1 throw kafka failed", zrwsl.getId(), jobId, e);
                /*log.info("通过kafka回调任务状态为-1失败，尝试使用接口回调");
                try{
                    com.cestc.baseinterface.dsynchronous.bean.Zrwsl zrwsl1 = new com.cestc.baseinterface.dsynchronous.bean.Zrwsl();
                    BeanUtils.copyProperties(zrwsl, zrwsl1);
                    SwaggerResultUtil<String> updatelog = tbyqCallbackService.updateZrwsl(zrwsl1);
                    if (!(updatelog != null && updatelog.getCode() == HttpCode.SUCCESS)) {
                        log.error("id: {} jobId:{}，接口重试回调失败 update zrwsl status -1 failed", zrwsl.getId(), zrwId);
                    } else {
                        log.info("接口重试回调成功 , call back update status -1 executing,result: {}  ", zrwId, updatelog);
                    }
                } catch (Exception ex) {
                    log.error("接口重试回调发生异常[exception] id: {} jobId:{}，update zrwsl status -1 failed", zrwsl.getId(), zrwId, ex);
                }*/


            }

        }

        /**
         * 任务异常信息更新
         */
        private void updateRwException(int status, Exception e) {
            try {
                log.error("Exception info ", e);
                //出现异常   更新子任务状态为1失败
                //异常 记录小任务执行结果  可能是其他地方的异常
                log.warn("----------------Exception happned，datax execute result--{}", status);
                zrwsl.setZxjg(status);

                // 改为向kafka发送消息
                KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
                kafkaZrwsl.setZrwsl(zrwsl);
                kafkaZrwsl.setMsgType("MsgTypeEnum.TYPE_1.getCode()");
                log.info("任务变为运行中,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
                //kafkaService

                // 改为向kafka发送消息
                KafkaZrwsl kafkaZrwsl2 = new KafkaZrwsl();
                kafkaZrwsl2.setZrwslVO(zrwslVo);
                kafkaZrwsl2.setMsgType("MsgTypeEnum.TYPE_2.getCode()");
                log.info("任务结束,准备回调,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl2));
                //kafkaService

                log.warn("----------------Exception happned，datax execute result:{}", kafkaZrwsl2);
            } catch (Exception ex) {
                log.error("----------------Exception happned in catch，datax execute result:{}", status, ex);
            }
        }

        private void callbackRwStatus(int status) {
            log.info("id: {} jobId:{}  execute callback,result:{}", zrwsl.getId(), jobId, status);
            zrwsl.setZxjg(status);
            zrwslVo.setZrwsl(zrwsl);
            try {
                // 任务执行结束后的回调，修改大任务实例状态,结束任务   updatecallback
                // 改为向kafka发送消息
                KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
                kafkaZrwsl.setZrwslVO(zrwslVo);
//                kafkaZrwsl.setMsgType(MsgTypeEnum.TYPE_2.getCode());
                kafkaZrwsl.setMsgType("MsgTypeEnum.TYPE_2.getCode()");
                log.info("任务结束,准备回调,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
                //kafkaService!

            } catch (Exception e) {
                log.error("[Exception] happned in id: {} jobId:{}  execute callback,result:{}", zrwsl.getId(),
                        jobId, status, e);
            }

        }

        /**
         * 回调异常信息到同步引擎.
         */
        private void callbackErrorMessage(StreamGobbler outputGobbler) {
            Long totalRead = outputGobbler.totalReadCount;
            Long totalWrite = totalRead - outputGobbler.totalFailCount;
            // 接口调用  保存任务异常信息
            int totalValue = totalRead.intValue();
            zrwsl.setReadSucceedRecords((long) totalValue);
            zrwsl.setWriteSucceedRecords((long) totalWrite.intValue());
            log.info("保存任务异常信息 zrwsl: {}", zrwsl);
            try {
                // 回调异常信息保存接口 createDataxRwzxYcxx
                // 改为向kafka发送消息
                KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
                kafkaZrwsl.setZrwsl(zrwsl);
//                kafkaZrwsl.setMsgType(MsgTypeEnum.TYPE_4.getCode());
                log.info("回调异常信息到同步引擎,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
                //kafkaService

            } catch (Exception e) {
                log.error("[exception] id: {} jobId:{}，save exception info failed", zrwsl.getId(), jobId, e);
            }

        }

        /**
         * 发送任务运行指标
         */
//        private void bsjToEs(String id, String zrwpch, StreamGobbler outputGobbler, int status) {
//            outputGobbler.recordExceptionDatasToES();
//            //任务成功，更新失败成功条数到SS_ZRWSL表
//            long totalRead = outputGobbler.totalReadCount;
//            long totalFail = outputGobbler.totalFailCount;
//            long totalSuccess = totalRead - totalFail;
//            zrwsl.setId(id);
//            log.info("任务结束,设置任务成功失败记录数zrwsl: {}", zrwsl);
//            //保存结果预览数据到es
//            if ("open".equals(logToEs) && status == 0) {
//                JgylesVO jgylesVO = getJgylesVo(zrwpch, totalRead, totalFail, totalSuccess);
//                outputGobbler.recordPreviewToEs(jgylesVO);
//            }
//        }

        /**
         * 构造JgylesVO.
         */
//        private JgylesVO getJgylesVo(String zrwpch, long totalRead, long totalFail, long totalSuccess) {
//            JgylesVO jgylesVO = new JgylesVO();
//            jgylesVO.setRwid(zrwsl.getRwId());
//            jgylesVO.setZrwid(zrwId);
//            jgylesVO.setRwpch(zrwpch);
//            jgylesVO.setFail(Long.toString(totalFail));
//            jgylesVO.setSuccess(Long.toString(totalSuccess));
//            jgylesVO.setTotal(Long.toString(totalRead));
//            return jgylesVO;
//        }

        /**
         * 删除临时json配置文件
         */
        private void deleteTempRwJsonFile(String fileName) {
            try {
                // 获取文件名，sonar漏洞修改，需写死前缀路径，只传文件名
                File file = new File("/home/datax/pzjson/", FilenameUtils.getName(fileName));
                if (file.isFile() && file.exists()) {
                    file.delete();
                    log.info("删除单个文件{}成功！", fileName);
                }
            } catch (Exception e) {
                log.error("删除任务json发生异常", e);
            }
        }

        private void deleteChunjunTempRwJsonFile(String fileName) {
            try {
                // 获取文件名，sonar漏洞修改，需写死前缀路径，只传文件名
                File file = new File("/home/chunjun/pzjson/", FilenameUtils.getName(fileName));
                if (file.isFile() && file.exists()) {
                    file.delete();
                    log.info("删除单个文件{}成功！", fileName);
                }
            } catch (Exception e) {
                log.error("删除任务json发生异常", e);
            }
        }


//        private void sendJgYlToKafka(String rwId, String zrwId, String rwpch, int status) {
//            //发送kafka日志的结束标识
//            DataxKafkaLog kafkaLog = new DataxKafkaLog(rwId, zrwId, rwpch);
//            kafkaLog.setRemark("数据结果预览");
//            //插入结束消息体
//            kafkaLog.setJsbs(String.valueOf(status));
//            //kafkaService
//        }

//        /**
//         * 发送kafka日志的结束标识
//         *
//         * @param status 根据状态发送不同值
//         */
//        private void sendTbrwJgToKafka(int status) {
//            //发送kafka日志的结束标识
//            DataxKafkaLog kafkaLog = new DataxKafkaLog(zrwsl.getRwId(), zrwsl.getZrwId(), zrwsl.getZrwpch());
//            kafkaLog.setRemark("同步任务");
//            //插入结束消息体
//            if (status == 0) {
//                kafkaLog.setJsbs("1");
//            } else {
//                kafkaLog.setJsbs("0");
//            }
//            //kafkaService
//        }

        /**
         * 创建结果预览es索引
         */
        private boolean createRwjgEsIndex() {
            try {
//                if (!ElasticsearchUtil.isIndexExist(rwjgIndexName)) {
//                    return SpringApplicationContextUtil.getBean(EsServiceImpl.class).createRwjgIndex(rwjgIndexName);
//                }
            } catch (Exception e) {
                log.error("任务结果预览创建es索引失败", e);
                return false;
            }
            return true;
        }

        /**
         * datax任务结束，保存成功失败条数到DataxCestcJkzbTemp表
         */
        public void updateJkzb(long totalRead, long totalFail, Zrwsl zrwsl) {
            zrwsl.setReadSucceedRecords(totalRead);
            zrwsl.setWriteSucceedRecords(totalRead - totalFail);
            zrwsl.setTotalErrorRecords(totalFail);
            log.info("ready-update-table-SS_ZRWSL {}", zrwsl);

            // 将任务执行的指标信息返回同步provider保存 updateJkzb
            // 改为向kafka发送消息
            KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
            kafkaZrwsl.setZrwsl(zrwsl);
//            kafkaZrwsl.setMsgType(MsgTypeEnum.TYPE_3.getCode());
            log.info("更新任务记录数,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
            //kafkaService

            log.info("任务成功，更新失败成功条数到SS_ZRWSL表成功 update-table-SS_ZRWSL-success, {}", kafkaZrwsl);
        }

        /**
         *
         */
        public class StreamGobbler extends Thread {

            private InputStream is;
            private String type;
            private String jobid;
            private String rwly;
            private String rwhepch;//es索引，ERROR_${jobId}_批次号
            private List<String> needRecordList = new ArrayList<>();
            private boolean dirtyDataFlag = false;
            private List<String> previewRecordList = new ArrayList<>();
            private boolean previewDataFlag = false;
            private Zrwsl zrwsl;
            private long totalReadCount = 0;
            private long totalFailCount = 0;
            private int zxjg = 1;
            private double timeCount = 0;//任务总时长，单位秒，用于计算成功字节数
            private double speedCount = 0;//平均流量，单位b/s，用于计算成功字节数
            private List<String> needDeleteFile = new ArrayList<>();
            //用于组装插入到kafka的日志信息
            private StringBuilder sb = new StringBuilder();
            //判断一条日志行是否结束
            private boolean jsFlag = false;
            //常量设置结果预览对比值
            private String compareMessResult = "StdoutPluginCollector - 结果预览";
            //常量设置脏数据对比值
            private String compareDirtyMess = "StdoutPluginCollector - 脏数据: ";
            //写入es运行日志中脏数据数目
            private AtomicLong dirtyRecordNum = new AtomicLong();

            private String errLogFormatRes = null;

            private FileWriter fw;

            public StreamGobbler(InputStream is, String type, String jobid, String rwhepch, Zrwsl zrwsl, String rwly) {
                this.is = is;
                this.type = type;
                this.jobid = jobid;
                this.rwhepch = rwhepch;
                this.zrwsl = zrwsl;
                this.rwly = rwly;
            }

            @Override
            public void run() {
                log.info("read log thread starting");
                try {
                    File tempFilePath = new File(DATAXHOME + DATAX_LOCALLOG);
                    if (!tempFilePath.exists()) {
                        // 创建本地存放datax脚本的目录 /home/datax/locallog/
                        tempFilePath.mkdirs();
                    }
                    //先将日志保存到本地磁盘
                    File localLog = new File(DATAXHOME + DATAX_LOCALLOG, FilenameUtils.getName(zrwId + zrwsl.getZrwpch()));
                    this.fw = new FileWriter(localLog);
                } catch (Exception e) {
                    log.error("缓存日志到本地异常", e);
                }

                try (InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new BufferedReader(isr)) {
                    String line;
                    while ((line = br.readLine()) != null) {
                        // 日志优化，结果预览不打印,新需求打印脏数据信息
                        boolean previewRecordFilter = (!line.contains(compareMessResult) && !previewDataFlag);
                        // 保存消息到es
                        if (previewRecordFilter && Constants.ES_LOG_OPEN.equals(logToEs)) {
                            // 判断日志中是否包含脏数据 ,取不超过10条脏数据保存到ES 日志中
                            if (!((line.contains(compareDirtyMess) || dirtyDataFlag) && dirtyRecordNum.longValue() > Constants.DATA_PREVEW)) {
                                // TODO Datax 输出日志保存 到ES 代码
                                recordLogToES(line, "0", zrwsl.getEngineMode());
                            }
                            // 对脏数据中的日志进行展示的优化，对异常信息进行翻译中文提示
                            if (dirtyRecordNum.longValue() < 10) {
                                // 异常日志优化
                                translateErrorInfo(line);
                            }
                        }

//                        if (EngineLxEnum.DATAX.getName().equalsIgnoreCase(zrwsl.getEngineMode())) {
//                            // 保存脏数据到es，用于补数据任务
//                            if (line.contains(compareDirtyMess)) {
//                                dirtyDataFlag = true;
//                            }
//                            if (!line.contains(compareDirtyMess) && dirtyDataFlag) {
//                                log.debug(line.replaceAll("[\r\n]", ""));
//                                needRecordList.add(line);
//                                dirtyRecordNum.getAndIncrement();
//                                dirtyDataFlag = false;
//                            }
//
//                            //保存结果预览数据到es
//                            if (line.contains(compareMessResult)) {
//                                previewDataFlag = true;
//                            }
//                            //取第二行数据，并将标识置为false
//                            if (!line.contains(compareMessResult) && previewDataFlag) {
//                                previewRecordList.add(line);
//                                previewDataFlag = false;
//                            }
//                            // 判断输出日志中的待删除文件标识
//                            if (line.contains(Constants.NEED_DELETE_FILE)) {
//                                setNeedDeleteFile(line);
//                            }
//
//                            //文件同步信息发送到kafka,仅集成平台需要
//                            if (Constants.RWLY_DINTEGRATION.equals(rwly)) {
////                                recordFileResultToKafka(line);
//                            }
//                        }

                        // 获取输出日志中的各种统计指标
                        getTotalReadCount(line);
                        getTotalFailCount(line);
                        getTimeCount(line);
                        getSpeedCount(line);

                        // 判断是否包含 【exitCode】退出码 标识, 若有退出码，表名当前子任务实例刚好执行结束
                        callbackUpdateZrwsl(line);

                        //配置了kafka.topic就将日志写入到kafka，仅离线平台需要
                        if (rwly != null && logToKafa.contains(rwly) && Constants.ES_LOG_OPEN.equals(logToEs)) {
                            // TODO 给离线平台推送Datax 日志
                            recordLogToKafka(line);
                        }
                    }
                    // 存入最后一条日志 将日志写到kafka，仅离线平台需要
                    if (rwly != null && logToKafa.contains(rwly) && Constants.ES_LOG_OPEN.equals(logToEs)) {
                        // TODO 子任务运行结束,需要向kafka推送
                        //kafkaService
                    }
                    log.info("{}执行正常结束", jobid);
                } catch (Exception e) {
                    log.error("[Exception happened]", e);
                }
                try {
                    fw.close();
                } catch (IOException e) {
                    log.error("缓存日志到本地异常", e);
                }
            }


            /**
             * 优化异常信息提示
             *
             * @param line
             */
            private void translateErrorInfo(String line) {
                String errLogFormat = null;
                if (line.contains("connect timed out")) {
                    errLogFormat = "引擎运行时出错，错误信息为：连接超时";
                } else if (line.contains("doesn't exist") && line.contains("Table")) {
                    errLogFormat = "引擎运行时出错，错误信息为：表不存在";
                } else if (line.contains("table or view does not exist")) {
                    errLogFormat = "引擎运行时出错，错误信息为：表或者视图不存在";
                } else if (line.contains("inconsistent datatypes")) {
                    errLogFormat = "引擎运行时出错，错误信息为：数据类型不一致";
                } else if (line.contains("Data too long for column") ||
                        line.contains("value too large for column") ||
                        line.contains("value too long for type")) {
                    errLogFormat = "引擎运行时出错，错误信息为：要写入数据超过目标端字段长度, 请检查目标端字段长度设置";
                } else if (line.contains("unique constraint")) {
                    errLogFormat = "引擎运行时出错，错误信息为：字段违反唯一索引约束, 请检查目标端索引限制";
                } else if (line.contains("invalid input syntax for")) {
                    errLogFormat = "引擎运行时出错，错误信息为：要写入字段出现字段类型不匹配，请检查源端和目标端字段类型设置";
                } else if (line.contains("ORA-00904") && line.contains("invalid identifier")) {
                    // 截取符号或者字段
                    String errorField = line.substring(line.lastIndexOf(": \"") + 3, line.lastIndexOf("\":"));
                    errLogFormat = "引擎运行时出错，错误信息为：无效的标识符,请检查源端或者目标端字段: " + errorField + " 是否存在";
                }

                if (!StringUtils.isEmpty(errLogFormat)) {
                    errLogFormatRes = errLogFormat;
                    recordLogToES(errLogFormat, "0", zrwsl.getEngineMode());
                }

                if (!StringUtils.isEmpty(errLogFormatRes) && line.contains("经DataX智能分析,该任务最可能的错误原因是:")) {
                    recordLogToES(errLogFormatRes, "0", zrwsl.getEngineMode());
                }
            }

            /**
             * 离线实时显示日志需求，就将日志写入到kafka，
             * 需要一个日期开头一条消息
             */
            private void recordLogToKafka(String line) {
                if (!org.apache.commons.lang3.StringUtils.isNoneBlank(topic)) {
                    return;
                }
                //匹配xxxx-xx-xx xx:xx:xx.xxx日志开头的时间戳
                String rexp = "\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{3}(.*?)";
                Pattern p = Pattern.compile(rexp);
                Matcher m = p.matcher(line);
                //这里主要是一条日志可能存在换行
                if (m.matches()) {
                    //首次匹配到一条日志的开头，jsFlag为false，不会保存
                    if (jsFlag) {
                        //将日志写到kafka
                        //kafkaService
                    }
                    //将jsFlag置为true，第二次匹配到另一条日志的开头时就能清空sb，将上一条完整的日志作为一个message插到kafka
                    jsFlag = true;
                    //另一条日志的开头，清空string
                    sb.setLength(0);
                }
                sb.append(line);
                sb.append("\\n");
            }

            /**
             * 文件传输结果写入到kafka
             */
//            private void recordFileResultToKafka(String line) {
//                if (!line.contains("transport file info:")) {
//                    return;
//                }
//                try {
//                    int index = line.lastIndexOf("transport file info:");
//                    String fileTransportInfo = line.substring(index + NUMBER_OF_KAFKA_INDEX_20);
//                    List<SourceDestFileSize> sourceDestFileSizes = JSON.parseArray(fileTransportInfo,
//                            SourceDestFileSize.class);
//
//                    List<WjJcXx> wjJcXxList = new ArrayList<>(Constants.DEFAULT_COLLECTION_SIZE);
//                    for (SourceDestFileSize temp : sourceDestFileSizes) {
//                        WjJcXx wjJcXx = new WjJcXx();
//                        wjJcXx.setRwId(zrwsl.getRwId());
//                        wjJcXx.setZrwId(zrwsl.getZrwId());
//                        wjJcXx.setZrwpch(zrwsl.getZrwpch());
//                        wjJcXx.setLywjlj(temp.getSourcePath());
//                        // 设置实际的目标端文件存储路径
//                        wjJcXx.setMbcclj(temp.getActualdestPath());
//                        wjJcXx.setWjdx(temp.getFileSize());
//                        wjJcXxList.add(wjJcXx);
//                    }
//                    if (!wjJcXxList.isEmpty()) {
//                        //kafkaService
//                    }
//                } catch (Exception ex) {
//                    log.error("fileToKafkaFailed:", ex);
//                }
//            }
            private void callbackUpdateZrwsl(String line) {
                if (!line.contains("exitCode")) {
                    return;
                }
                int index = line.lastIndexOf(':');
                String status = line.substring(index + 1);
                zrwsl.setZxjg(Integer.valueOf(status));
                zxjg = Integer.valueOf(status);
                try {
                    log.info("修改zrwsl开始:{}", JSON.toJSONString(zrwsl));
                    //
                    // 第二次修改子任务状态：将子任务实例改为 0 或者 大于0 ，并修改子任务结束时间
                    // 改为向kafka发送消息
                    KafkaZrwsl kafkaZrwsl = new KafkaZrwsl();
                    //不管任务成功还是失败都更新成功失败条数
                    long totalSuccess = totalReadCount - totalFailCount;
                    zrwsl.setWriteReceivedBytes((long) (timeCount * speedCount));
                    zrwsl.setReadSucceedRecords(totalReadCount);
                    zrwsl.setWriteSucceedRecords(totalSuccess);
                    zrwsl.setTotalErrorRecords(totalFailCount);
                    kafkaZrwsl.setZrwsl(zrwsl);
                    kafkaZrwsl.setMsgType("MsgTypeEnum.TYPE_1.getCode()");
                    log.info("任务变为运行中,开始往kafka发送消息：{}", JSON.toJSONString(kafkaZrwsl));
                    //kafkaService
                } catch (Exception e) {
                    log.error("修改zrwsl异常：{}", JSON.toJSONString(zrwsl), e);
                }

            }

            private void getSpeedCount(String line) {
                if (!line.contains("任务平均流量")) {
                    return;
                }
                String speedTemp = line.replace(" ", "");
                String speed = speedTemp.substring(speedTemp.indexOf(':') + 1);
                if (speed.contains("MB/s")) {
                    speedCount = Double.parseDouble(speed.substring(0, speed.length() - NUMBER_OF_SPEED_COUNT_SUBSPLIT_4)) * NUMBER_OF_SPEED_COUNT_SUB * NUMBER_OF_SPEED_COUNT_SUB;
                } else {
                    if (speed.contains("KB/s")) {
                        speedCount = Double.parseDouble(speed.substring(0, speed.length() - NUMBER_OF_SPEED_COUNT_SUBSPLIT_4)) * NUMBER_OF_SPEED_COUNT_SUB;
                    } else {
                        //单位为B/s
                        speedCount = Double.parseDouble(speed.substring(0, speed.length() - (NUMBER_OF_SPEED_COUNT_SUBSPLIT_4 - 1)));
                    }
                }
            }

            private void getTimeCount(String line) {
                if (line.contains("任务总计耗时")) {
                    String timeTemp = line.replace(" ", "");
                    String time = timeTemp.substring(timeTemp.indexOf(':') + 1);
                    timeCount = Double.parseDouble(time.substring(0, time.length() - 1));
                }
            }

            //获取待删除文件
            private void setNeedDeleteFile(String line) {
                //获取待删除文件
                String str = line.substring(line.lastIndexOf(Constants.NEED_DELETE_FILE) + 1);
                str = str.substring(5);
                JSONArray objects = JSON.parseArray(str);
                List<String> arrayLists = objects.toJavaList(String.class);
                needDeleteFile.addAll(arrayLists);
            }

            //获取待删除文件
            private List<String> getNeedDeleteFile() {
                return needDeleteFile;
            }

            private void getTotalFailCount(String line) {
                if (line.contains("读写失败总数") || line.contains("文件失败写入")) {
                    String failCountTemp = line.replace(" ", "");
                    String data = failCountTemp.substring(failCountTemp.indexOf(':') + 1);
                    log.debug(data.replaceAll("[\r\n]", ""));
                    totalFailCount = Long.parseLong(data);
                }
            }

            private void getTotalReadCount(String line) {
                if (line.contains("读出记录总数") || line.contains("文件成功读取")) {
                    String totalCountTemp = line.replace(" ", "");
                    String data = totalCountTemp.substring(totalCountTemp.lastIndexOf(':') + 1);
                    log.debug(data.replaceAll("[\r\n]", ""));
                    totalReadCount = Long.parseLong(data);
                }
                if (line.contains("定制文件迁移成功读取文件")) {
                    totalReadCount++;
                }
            }

            /**
             * 异常写入es
             */
            public void recordExceptionDatasToES() {
                if (needRecordList == null || needRecordList.isEmpty()) {
                    return;
                }
                if (!createEsIndex()) {
                    log.info("创建es索引失败，导致无法存入es");
                    return;
                }
                for (String data : needRecordList) {
                    try {
                        Map<String, String> map = new HashMap<>();
                        map.put("info", data);
                        map.put("rwhepch", rwhepch);
                        String realData = JSON.toJSONString(map);
//                        ElasticsearchExtendUtil.createData(realData, dirtyDataIndex, Constants.ES_DOC);
                        // esRestApiService.createData(realData, dirtyDataIndex, Constants.ES_DOC);
                    } catch (Exception e) {
                        log.error("该条入库es失败:" + data, e);
                    }
                }
            }

            /**
             * 保存结果预览信息到es
             */
//            public void recordPreviewToEs(JgylesVO jgylesVO) {
//                if (!createRwjgEsIndex()) {
//                    log.error("任务结果预览创建索引失败,无法存入");
//                    return;
//                }
//                String realData = null;
//                try {
//                    if (CollectionUtil.isNotEmpty(previewRecordList)) {
//                        List<String> columnNameList = new ArrayList<>(Constants.DEFAULT_COLLECTION_SIZE);
//                        List<Map<String, Object>> dataList = new ArrayList<>(Constants.DEFAULT_COLLECTION_SIZE);
////                        for (String record : previewRecordList) {
////                            List<Column> columnList = JSON.parseObject(record, DataxColumn.class).getData();
////                            Map<String, Object> column = new HashMap<>(Constants.DEFAULT_COLLECTION_SIZE);
//////                            for (Column column1 : columnList) {
//////                                String columnName = column1.getName();
//////                                //脱敏需求，去掉列名飘号和双引号
//////                                if (columnName.startsWith("`") || columnName.startsWith("\"")) {
//////                                    columnName = columnName.substring(1, columnName.length() - 1);
//////                                }
////////                                column.put(columnName, RawDataCaster.castRawData(column1));
//////                                //列名list只用插一次
//////                                if (columnNameList.size() < columnList.size()) {
//////                                    columnNameList.add(columnName);
//////                                }
////                            }
////                            dataList.add(column);
////                        }
//
////                        jgylesVO.setColumnamelist(JSON.toJSONString(columnNameList));
////                        jgylesVO.setDatalist(JSON.toJSONString(dataList));
//                    }
//
////                    realData = JSON.toJSONString(jgylesVO);
////                    log.info("构造数据预览List {}", jgylesVO);
////
////                    esRestApiService.createData(realData, rwjgIndexName, Constants.ES_DOC,
////                            jgylesVO.getZrwid() + jgylesVO.getRwpch());
//                    log.info("rwId：{} zrwId:{} rwpch:{} 查询保存预览结果到es成功", zrwsl.getRwId(),
//                            zrwsl.getZrwId(), zrwsl.getZrwpch());
//                    if (rwly != null && logToKafa.contains(rwly)) {
//                        sendJgYlToKafka(zrwsl.getRwId(), zrwsl.getZrwId(), zrwsl.getZrwpch(), 1);
//                    }
//                } catch (Exception e) {
//                    log.error("任务预览结果 【{}】写入es失败", realData, e);
//                    try {
//                        if (rwly != null && logToKafa.contains(rwly)) {
//                            sendJgYlToKafka(zrwsl.getRwId(), jobId, zrwsl.getZrwpch(), 0);
//                        }
//                    } catch (Exception ex) {
//                        log.error("结果预览状态写入kafka失败", ex);
//                    }
//                }
//            }
            private void recordLogToES(String line, String jsbs, String engineMode) {
                if (saveLocalSwitch) {
                    //防止es异常写日志阻塞导致任务执行卡顿，先写日志到es，任务执行完成后异步上传到es
                    try {
                        this.fw.write(line + '\n');
                    } catch (IOException e) {
                        log.error("缓存日志到本地异常", e);
                    }
                } else {
                    //在任务运行中直接保存日志到es
                    if (!createLogEsIndex()) {
                        log.error("datax日志任务创建索引失败,无法存入");
                        return;
                    }
                    try {
                        DataxEsLog dataxEsLog = new DataxEsLog();
                        dataxEsLog.setRwId(zrwsl.getRwId());
                        dataxEsLog.setJobId(zrwsl.getZrwId());
                        dataxEsLog.setRwpch(zrwsl.getZrwpch());
                        dataxEsLog.setLog(line);
                        dataxEsLog.setJsbs(jsbs);
                        dataxEsLog.setCjsj(System.currentTimeMillis());
                        String realData = JSON.toJSONString(dataxEsLog);
//                        ElasticsearchExtendUtil.createData(realData, logIndexName, Constants.ES_DOC);
                    } catch (Exception e) {
                        log.error("日志 【{}】写入es失败", line, e);
                    }
                }
            }

            private boolean createLogEsIndex() {
                try {
//                    if (!ElasticsearchExtendUtil.isIndexExist(logIndexName)) {
////                        return SpringApplicationContextUtil.getBean(EsServiceImpl.class).createLogIndex(logIndexName);
//                        return true;
//                    }
                } catch (Exception e) {
                    log.error("datax日志创建es索引失败", e);
                    return false;
                }
                return true;
            }

            /**
             * 创建es索引
             *
             * @return boolean
             */
            private boolean createEsIndex() {
                try {
//                    if (!ElasticsearchExtendUtil.isIndexExist(dirtyDataIndex)) {
//                        return ElasticsearchExtendUtil.createIndex(dirtyDataIndex);
//                    }
                } catch (Exception e) {
                    log.error("记录日志创建es索引失败：", e);
                    return false;
                }
                return true;
            }
        }

    }

    /**
     * 根据批次号获取pid
     */
    public String getPid(String jobId, String pch) throws IllegalArgumentException, IOException {

        if (!ValidateUtils.verifyZrwId(jobId)) {
            throw new IllegalArgumentException("子任务格式错误");
        }

        if (!ValidateUtils.verifyPch(pch)) {
            throw new IllegalArgumentException("批次号格式错误");
        }

        String pid = null;
        String cmd = "ps aux | grep '" + jobId + "' | grep '" + pch + "'";
        List<String> cmds = ValidateUtils.getCmdList(cmd);
        Process getpid = CommandLineUtil.exec(cmds.toArray(new String[cmds.size()]));
        log.info("get pid:{}", cmd);

        try (InputStreamReader is = new InputStreamReader(getpid.getInputStream());
             BufferedReader reader = new BufferedReader(is)) {
            String line = null;
            while ((line = reader.readLine()) != null) {
                log.info("ps aux 过滤信息：" + line.replaceAll("[\r\n]", ""));
                if (line.contains("java")) {
                    pid = line;
                }
            }
        } catch (Exception e) {
            log.error("根据批次号获取pid失败", e);
        }
        log.info(("get pid：" + pid).replaceAll("[\r\n]", ""));

        if (null != pid && StringUtils.hasLength(pid)) {
            String[] pidArr = pid.split("\\s+");
            pid = pidArr[1];
        }
        return pid;
    }

    /**
     * 通过pid kill掉进程
     */
    public void killPid(String pid) throws IllegalArgumentException, IOException, InterruptedException {
        if (!ValidateUtils.verifyZrwId(pid)) {
            throw new IllegalArgumentException("进程id格式不正确");
        }

        String cmd = "kill -9 " + pid;
        List<String> cmds = ValidateUtils.getCmdList(cmd);
        Process process = CommandLineUtil.exec(cmds.toArray(new String[cmds.size()]));
        int resCode = process.waitFor();
        log.info("killPid  process.waitFor():{}", resCode);
    }

//    /**
//     * 发送信息到kafka
//     *
//     * @param zrwsl  子任务实例对象
//     * @param logStr 日志字符串
//     * @retrun 返回拼接好的对象字符串
//     */
//    public String getDataxKafkaLogStr(Zrwsl zrwsl, String logStr) {
//        DataxKafkaLog kafkaLog = new DataxKafkaLog(zrwsl.getRwId(), zrwsl.getZrwId(), zrwsl.getZrwpch());
//        kafkaLog.setLog(logStr);
//        String jsonLog = JSON.toJSONString(kafkaLog).toString();
//        log.info("ready to send kafka string:{}");
//        return jsonLog;
//    }

}
