package cn.getech.data.development.utils.oozie;

import cn.getech.data.development.constant.*;
import cn.getech.data.development.dto.JobInfoDto;
import cn.getech.data.development.dto.JobNodeInfoDto;
import cn.getech.data.development.entity.JobInfo;
import cn.getech.data.development.entity.JobNodeConfig;
import cn.getech.data.development.entity.JobNodeInfo;
import cn.getech.data.development.utils.HdfsUtil;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.DateUtils;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateTime;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.oozie.client.*;
import org.apache.oozie.fluentjob.api.workflow.Workflow;

import javax.xml.bind.JAXBException;
import java.io.UnsupportedEncodingException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;


public class JobUtil {
    // Class-wide logger.
    private static Logger logger = Logger.getLogger(JobUtil.class);
    // HDFS helper used to upload workflow/coordinator XML and delete job files.
    private HdfsUtil hdfsUtil;
    // Oozie client used to build configurations and submit/kill/query jobs.
    private OozieClient wc;

    /**
     * Creates a JobUtil with HDFS access only. {@code wc} stays null, so any
     * Oozie-facing method (run/kill/status/log) will throw NPE — use the
     * two-argument constructor when Oozie access is needed.
     * @param hdfsUtil HDFS helper
     */
    public JobUtil(HdfsUtil hdfsUtil){
        this.hdfsUtil = hdfsUtil;
    }

    /**
     * Creates a fully functional JobUtil with both HDFS and Oozie access.
     * @param hdfsUtil HDFS helper
     * @param wc       Oozie client
     */
    public JobUtil(HdfsUtil hdfsUtil, OozieClient wc){
        this.hdfsUtil = hdfsUtil;
        this.wc = wc;
    }


    /**
     * Generates a workflow job, uploads it to HDFS and returns its configuration.
     * (This Javadoc belongs to the commented-out legacy implementation below,
     * apparently superseded by {@code createOrUpdateHaveConditionJob}.)
     * @param procId  project id
     * @param jobId   job id
     * @param linkedNodeList node link chains
     * @throws JAXBException
     * @throws UnsupportedEncodingException
     */
//    public Properties createOrUpdateJob(Integer procId, Integer jobId, List<List<JobNodeInfoDto>> linkedNodeList, Boolean isCoordinatorJob, Boolean isGenWorkFlow) throws JAXBException, UnsupportedEncodingException {
//
//        String dst = "workflow-app/" + WorkFlowType.WORKFLOW.getCode() + "/" + procId.toString() + "/" + jobId.toString();
//
//        //是否重新生成workflow
//        if(isGenWorkFlow){
//            WorkFlowUtil workFlowUtil = new WorkFlowUtil();
//            Workflow workflow = workFlowUtil.creatJobNodeShell(linkedNodeList,jobId,null);
//            hdfsUtil.writeFile(workflow.asXml().getBytes(), dst + "/workflow.xml");
//        }
//
//        //生成conf文件
//        //生成workflow.xml的必要参数
//        Properties conf = wc.createConfiguration();
//        conf.setProperty(OozieClient.USER_NAME, "hdfs");
//        conf.setProperty(OozieClient.APP_PATH, "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + dst);
//        conf.setProperty("resourceManager","yarnRM");
//        conf.setProperty("queueName","default");
//        conf.setProperty("nameNode","hdfs://" + hdfsUtil.bdpJobConfig.getNamespace());
//        conf.setProperty("rootDir","user/hdfs/" + dst);
//        if (!isCoordinatorJob){
//            conf.setProperty("time_hour", DateUtils.format(new Date(),DateUtils.DATE_TIME_PATTERN));
//        }
//        //赋值节点的名字
//        Set<String> allDataList = new HashSet<>();
//        for (List<JobNodeInfoDto> jobNodeInfoDtos : linkedNodeList) {
//            for (JobNodeInfoDto jobNodeInfoDto : jobNodeInfoDtos) {
//                allDataList.add(jobNodeInfoDto.getWorkFlowNodeName());
//            }
//        }
//        //结束的名字
//        allDataList.add(JobType.ObjOf(5).getEnName()+"_0");
//        for (String s : allDataList) {
//            String name = s;
//            if(s.equalsIgnoreCase("start_0")&&isCoordinatorJob){
//                name = "StartTwo_0";
//            }
//            conf.setProperty("EXEC" +s, name + ".sh");
//        }
//        return conf;
//    }




    /**
     * Optionally regenerates the conditional workflow XML, uploads it to HDFS,
     * and builds the Oozie configuration required to run it.
     *
     * @param procId           project id, part of the HDFS path
     * @param queueName        YARN queue to run on
     * @param jobId            job id, part of the HDFS path
     * @param linkedNodeList   node link chains making up the workflow
     * @param isCoordinatorJob true when the workflow will run under a coordinator
     * @param isGenWorkFlow    true to regenerate and upload workflow.xml
     * @param haveNullNodeIds  ids of nodes that must run a no-op shell instead of their own
     * @param reRunType        re-run mode; selects a dedicated sub-directory for the XML
     * @param jobNodeConfig    per-node configuration passed through to workflow generation
     * @return the populated Oozie configuration
     * @throws JAXBException                when workflow serialization fails
     * @throws UnsupportedEncodingException when workflow serialization fails
     */
    public Properties createOrUpdateHaveConditionJob(Integer procId, String queueName, Integer jobId, List<List<JobNodeInfoDto>> linkedNodeList, Boolean isCoordinatorJob, Boolean isGenWorkFlow, Set<Integer> haveNullNodeIds, Integer reRunType, JobNodeConfig jobNodeConfig) throws JAXBException, UnsupportedEncodingException {

        String dst = "workflow-app/" + WorkFlowType.WORKFLOW.getCode() + "/" + procId.toString() + "/" + jobId.toString();
        // Re-run modes write the XML into their own sub-directory so a re-run
        // never clobbers the original workflow definition.
        String baseDst = dst;
        for (ReRunTypeEnum reRun : new ReRunTypeEnum[]{ReRunTypeEnum.RE_CURRENT_NODE, ReRunTypeEnum.RE_UP_NODE, ReRunTypeEnum.RE_DOWN_NODE}) {
            if (Objects.equals(reRun.getId(), reRunType)) {
                baseDst = dst + "/" + reRun.getName();
                break;
            }
        }

        // Regenerate workflow.xml only when asked to.
        if (isGenWorkFlow) {
            WorkFlowUtil workFlowUtil = new WorkFlowUtil();
            Workflow workflow = workFlowUtil.creatJobNodeHaveConditionShell(linkedNodeList, jobId, jobNodeConfig);
            // A failed node that later succeeds should flow straight to the end
            // node instead of the kill node. Literal substitution — replace(),
            // not replaceAll(): no regex semantics intended.
            String newWfs = workflow.asXml().replace("<workflow:ok to=\"kill\"/>", "<workflow:ok to=\"end\"/>");
            hdfsUtil.writeFile(newWfs.getBytes(), baseDst + "/workflow.xml");
        }

        // Mandatory parameters referenced by workflow.xml.
        Properties conf = wc.createConfiguration();
        conf.setProperty(OozieClient.USER_NAME, "hdfs");
        conf.setProperty(OozieClient.APP_PATH, "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + baseDst);
        conf.setProperty("resourceManager", "yarnRM");
        conf.setProperty("queueName", queueName);
        conf.setProperty("nameNode", "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace());
        conf.setProperty("rootDir", "user/hdfs/" + dst);
        if (!isCoordinatorJob) {
            conf.setProperty("time_hour", DateUtils.format(new Date(), DateUtils.DATE_TIME_PATTERN));
        }

        // Collect every node name appearing in the workflow...
        Set<String> allDataList = new HashSet<>();
        for (List<JobNodeInfoDto> jobNodeInfoDtos : linkedNodeList) {
            for (JobNodeInfoDto jobNodeInfoDto : jobNodeInfoDtos) {
                allDataList.add(jobNodeInfoDto.getWorkFlowNodeName());
            }
        }
        // ...plus the terminal node's name.
        allDataList.add(JobType.ObjOf(5).getEnName() + "_0");

        // Map each node to the shell script it executes.
        for (String s : allDataList) {
            String name = s;
            // Coordinator jobs use the scheduled variant of the start shell.
            if (s.equalsIgnoreCase("start_0") && isCoordinatorJob) {
                name = "StartTwo_0";
            }
            // Nodes flagged as "null" run a no-op shell instead of their own.
            if (CollectionUtil.isNotEmpty(haveNullNodeIds)) {
                // String.split never returns null or an empty array, so the
                // last segment always exists; node names are assumed to end
                // with "_<nodeId>" — a non-numeric suffix throws, as before.
                String[] split = s.split("_");
                Integer jobNodeId = Integer.valueOf(split[split.length - 1]);
                if (haveNullNodeIds.contains(jobNodeId)) {
                    name = VirtualTypeEnum.VIRTUAL_NULL.getName() + "_" + jobNodeId;
                }
            }
            conf.setProperty("EXEC" + s, name + ".sh");
        }
        return conf;
    }


    /**
     * Optionally regenerates the layered (workMenu) workflow XML on HDFS and
     * returns the Oozie configuration required to run it.
     *
     * @param procId           project id, part of the HDFS path
     * @param queueName        YARN queue to run on
     * @param workMenuId       work-menu id, part of the HDFS path
     * @param allLinkedDtoList job link chains making up the workflow
     * @param isCoordinatorJob true when the workflow will run under a coordinator
     * @param isGenWorkFlow    true to regenerate and upload workflow.xml
     * @param jobNodeConfig    per-node configuration passed through to workflow generation
     * @return the populated Oozie configuration
     * @throws JAXBException                when workflow serialization fails
     * @throws UnsupportedEncodingException when workflow serialization fails
     */
    public Properties createOrUpdateJobLay(Integer procId, String queueName, int workMenuId, List<List<JobInfoDto>> allLinkedDtoList, Boolean isCoordinatorJob, Boolean isGenWorkFlow,JobNodeConfig jobNodeConfig) throws JAXBException, UnsupportedEncodingException {

        String appDir = "workflow-app/" + WorkFlowType.WORKFLOW_LAY.getCode() + "/" + procId.toString() + "/" + workMenuId;

        // Regenerate workflow.xml only when asked to.
        if (isGenWorkFlow) {
            Workflow workflow = new WorkFlowUtil().creatShell(allLinkedDtoList, workMenuId, jobNodeConfig);
            hdfsUtil.writeFile(workflow.asXml().getBytes(), appDir + "/workflow.xml");
        }

        // Mandatory parameters referenced by workflow.xml.
        String nameNodeUri = "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace();
        Properties props = wc.createConfiguration();
        props.setProperty(OozieClient.USER_NAME, "hdfs");
        props.setProperty(OozieClient.APP_PATH, nameNodeUri + "/user/hdfs/" + appDir);
        props.setProperty("resourceManager", "yarnRM");
        props.setProperty("queueName", queueName);
        props.setProperty("nameNode", nameNodeUri);
        props.setProperty("rootDir", "user/hdfs/" + appDir);
        if (!isCoordinatorJob) {
            props.setProperty("time_hour", DateUtils.format(new Date(), DateUtils.DATE_TIME_PATTERN));
        }

        // Gather every workflow node name plus the terminal "End_0" node.
        Set<String> nodeNames = new HashSet<>();
        for (List<JobInfoDto> linked : allLinkedDtoList) {
            for (JobInfoDto dto : linked) {
                nodeNames.add(dto.getWorkFlowNodeName());
            }
        }
        nodeNames.add("End_0");

        // Map each node to the shell script it executes; coordinator jobs
        // swap in the scheduled start shell.
        for (String nodeName : nodeNames) {
            boolean scheduledStart = isCoordinatorJob && nodeName.equalsIgnoreCase("start_0");
            String shellName = scheduledStart ? "StartTwo_0" : nodeName;
            props.setProperty("EXEC" + nodeName, shellName + ".sh");
        }
        return props;
    }

    /**
     * Generates a workflow from the job's designer JSON ({@code jobInfo.getParam()},
     * containing "nodeDataArray" and "linkDataArray"), uploads workflow.xml to
     * HDFS and returns the Oozie configuration required to run it.
     *
     * @param userId           owner id, part of the HDFS path
     * @param jobId            job id, part of the HDFS path
     * @param jobInfo          job whose {@code param} field holds the designer JSON
     * @param isCoordinatorJob true when the workflow will run under a coordinator
     * @return the populated Oozie configuration
     * @throws JAXBException                when workflow serialization fails
     * @throws UnsupportedEncodingException when workflow serialization fails
     */
    public Properties createOrUpdateJob(Long userId, Integer jobId, JobInfo jobInfo,Boolean isCoordinatorJob) throws JAXBException, UnsupportedEncodingException {
        WorkFlowUtil workFlowUtil = new WorkFlowUtil();
        // node key -> node category, taken from the designer JSON
        Map<String, String> nameNode = new HashMap<>();
        JSONObject jsonObject = JSONObject.parseObject(jobInfo.getParam());
        List nodeDataArray = jsonObject.getJSONArray("nodeDataArray");
        List linkDataArray = jsonObject.getJSONArray("linkDataArray");
        String startKey = "";
        for (Object nodeDataObj : nodeDataArray) {
            JSONObject nodeData = JSONObject.parseObject(nodeDataObj.toString());
            String key = nodeData.get("key").toString();
            String category = nodeData.get("category").toString();
            nameNode.put(key, category);
            if (StringUtils.equalsIgnoreCase(category, "Start")) {
                startKey = key;
            }
        }

        String dst = "workflow-app/" + userId.toString() + "/" + jobId.toString();
        Workflow workflow = workFlowUtil.creatShell(nameNode, linkDataArray, jobId.toString(), startKey, isCoordinatorJob);
        hdfsUtil.writeFile(workflow.asXml().getBytes(), dst + "/workflow.xml");

        // Mandatory parameters referenced by workflow.xml.
        Properties conf = wc.createConfiguration();
        conf.setProperty(OozieClient.USER_NAME, "hdfs");
        conf.setProperty(OozieClient.APP_PATH, "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + dst);
        conf.setProperty("resourceManager", "yarnRM");
        conf.setProperty("queueName", "default");
        conf.setProperty("nameNode", "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace());
        conf.setProperty("rootDir", "user/hdfs/" + dst);
        if (!isCoordinatorJob) {
            conf.setProperty("time_hour", DateUtils.format(new Date(), DateUtils.DATE_TIME_PATTERN));
        }

        // Map each node key to its shell script name.
        for (Map.Entry<String, String> entry : nameNode.entrySet()) {
            String cleanKey = entry.getKey().replace("-", "");
            String name = entry.getValue();
            // NOTE(review): `name` holds the node *category* here, so this
            // comparison against "start_0" looks like it can never match
            // (sibling methods compare node *names*) — verify intent.
            if (name.equalsIgnoreCase("start_0") && isCoordinatorJob) {
                name = "StartTwo";
            }
            conf.setProperty("EXEC" + cleanKey, name + cleanKey + ".sh");
        }

        return conf;
    }

    /**
     * Kills the running Oozie job (if any) and removes the job's workflow
     * directory (workflow.xml and shells) from HDFS.
     *
     * @param userId     owner id, part of the HDFS path
     * @param jobId      job id, part of the HDFS path
     * @param oozieJobId Oozie job id to kill first; may be null
     * @return true when the HDFS directory was deleted
     */
    public boolean deleteJob(Long userId, Integer jobId,String oozieJobId) throws OozieClientException {
        String dst = "workflow-app/" + WorkFlowType.WORKFLOW.getCode() + "/"  + userId.toString() + "/" + jobId.toString();
        if (null!=oozieJobId){
            try {
                wc.kill(oozieJobId);
            }catch (Exception e){
                // Best effort: deletion proceeds even if the kill fails, but
                // keep the throwable so the stack trace is not lost.
                logger.error("停止任务失败！oozieId:"+oozieJobId, e);
            }
        }
        return hdfsUtil.delete(dst);
    }

    /**
     * Deletes a single node's shell script from the job's workflow directory
     * on HDFS.
     *
     * @param userId      owner id, part of the HDFS path
     * @param jobInfo     job whose directory contains the shell
     * @param jobNodeInfo node whose shell should be removed
     * @return true when the file was deleted
     */
    public boolean deleteJob(Long userId, JobInfo jobInfo, JobNodeInfo jobNodeInfo)   {
        // The node key carries a one-character prefix that is not part of the file name.
        String keySuffix = jobNodeInfo.getNodeKey().substring(1);
        StringBuilder path = new StringBuilder("workflow-app/");
        path.append(WorkFlowType.WORKFLOW.getCode()).append("/")
                .append(userId.toString()).append("/")
                .append(jobInfo.getId().toString()).append("/")
                .append(JobType.ObjOf(jobNodeInfo.getTypeId()).getEnName())
                .append(keySuffix).append(".sh");
        return hdfsUtil.delete(path.toString());
    }

    /**
     * Submits the configuration to Oozie and returns the assigned job id.
     *
     * @param conf fully populated Oozie configuration
     * @return the Oozie job id assigned on submission
     * @throws OozieClientException when Oozie rejects the submission
     */
    public String runJob(Properties conf) throws OozieClientException {
        return wc.run(conf);
    }

    /**
     * Kills an Oozie job. A null id is ignored; kill failures are logged and
     * swallowed — stopping is deliberately best-effort.
     *
     * @param oozieJobId Oozie job id; may be null
     */
    public void stopJob(String oozieJobId){
        if(oozieJobId!=null){
            try {
                wc.kill(oozieJobId);
            }catch (Exception e){
                // Keep the throwable so the failure cause is not lost.
                logger.error("停止任务失败！oozieId:"+oozieJobId, e);
            }
        }
    }

    /**
     * Fetches the Oozie log of a job.
     *
     * @param oozieJobId Oozie job id
     * @return the raw log text reported by Oozie
     * @throws OozieClientException when the log cannot be retrieved
     */
    public String getJobLog(String oozieJobId) throws OozieClientException {
        String log = wc.getJobLog(oozieJobId);
        return log;
    }

    /**
     * Returns the current execution status of a job as reported by Oozie.
     *
     * @param oozieJobId Oozie job id
     * @return the job's status string
     * @throws OozieClientException when the status cannot be retrieved
     */
    public String getJobStatus(String oozieJobId) throws OozieClientException {
        return wc.getStatus(oozieJobId);
    }

    /**
     * Returns the actions of a workflow job.
     *
     * @param oozieJobId Oozie workflow job id
     * @return the job's workflow actions
     * @throws OozieClientException when the job info cannot be retrieved
     */
    public List<WorkflowAction> getAction(String  oozieJobId) throws OozieClientException {
        // Typed return instead of the raw List this method used to declare;
        // source-compatible for existing callers.
        return wc.getJobInfo(oozieJobId).getActions();
    }


    /**
     * Returns the materialized actions of a coordinator job.
     *
     * @param oozieJobId Oozie coordinator job id
     * @return the coordinator's actions
     * @throws OozieClientException when the job info cannot be retrieved
     */
    public List<CoordinatorAction> getCoordinatorAction(String oozieJobId) throws OozieClientException {
        return wc.getCoordJobInfo(oozieJobId).getActions();
    }


    /**
     * Resolves the workflow-job id spawned by the num-th materialization of a
     * coordinator job.
     *
     * @param oozieJobId coordinator job id
     * @param num        action ordinal within the coordinator
     * @return the external (workflow) id of that coordinator action
     * @throws OozieClientException when the job info cannot be retrieved
     */
    public String getWorkFlowID(String oozieJobId, Integer num) throws OozieClientException {
        // Coordinator actions are addressed as "<coordId>@<ordinal>".
        String actionId = oozieJobId + "@" + num;
        return wc.getJobInfo(actionId).getExternalId();
    }

    /**
     * Fetches the full workflow-job description for an id.
     *
     * @param oozieJobId Oozie workflow job id
     * @return the workflow job info
     * @throws OozieClientException when the job info cannot be retrieved
     */
    public WorkflowJob getWorkFlow(String oozieJobId) throws OozieClientException {
        return wc.getJobInfo(oozieJobId);
    }


    /**
     * Convenience overload accepting the project id as an Integer; delegates
     * to the Long-based {@code runCoordinatorJob}.
     */
    public String runCoordinatorJob(Integer procId, Integer jobId, Properties conf, String expression, Integer scheduleOrIntervalCode, Integer isDurTime, String startTime, String endTime, Integer isRetry, Integer reTryMax, Integer reTryInterval, JSONObject objStartTime, Integer cronType, String cronUrl) throws OozieClientException {
        Long procIdAsLong = Long.valueOf(procId);
        return runCoordinatorJob(procIdAsLong, jobId, conf, expression, scheduleOrIntervalCode, isDurTime, startTime, endTime, isRetry, reTryMax, reTryInterval, objStartTime, cronType, cronUrl);
    }


    /**
     * Schedules the layered (workMenu) workflow as an Oozie coordinator job:
     * writes coordinator.xml to HDFS, fills in the coordinator properties
     * (frequency, start/end time, workflow path) and submits to Oozie.
     *
     * @param procId                 project id, part of the HDFS paths
     * @param workMenuDepId          work-menu id, part of the HDFS paths
     * @param conf                   workflow configuration to extend with coordinator properties
     * @param expression             schedule expression (JSON in interval mode)
     * @param scheduleOrIntervalCode schedule mode code (compared as a String here)
     * @param objStart               out-param: receives the computed "startTime"
     * @param cronType               2 means a raw crontab expression is supplied via cronUrl
     * @param cronUrl                crontab expression used when cronType == 2
     * @return the Oozie coordinator job id
     * @throws OozieClientException when the submission fails
     */
    public String runCoordinatorJobLay(Integer procId, Integer workMenuDepId, Properties conf, String expression, String scheduleOrIntervalCode, JSONObject objStart, Integer cronType, String cronUrl) throws OozieClientException {
        String coordDst = "coordinator-app/" + WorkFlowType.WORKFLOW_LAY.getCode() +"/" + procId + "/" + workMenuDepId ;
        String workFlowDst = "workflow-app/" + WorkFlowType.WORKFLOW_LAY.getCode() +"/" + procId + "/" + workMenuDepId ;
        CoordinatorUtil coordinatorUtil = new CoordinatorUtil();
        if(Objects.equals(2,cronType)){
            // cronType 2: the caller supplies a raw crontab expression (cronUrl).
            hdfsUtil.writeFile(coordinatorUtil.creatCoordinatorXml(workMenuDepId,WorkFlowType.WORKFLOW_LAY.getCode()).replace("xmlns:","").getBytes(), coordDst + "/coordinator.xml");
            // The crontab expression becomes the coordinator's frequency property.
            conf.setProperty("frequency",cronUrl);
        }else {
            String time = coordinatorUtil.time2ELExpresion(expression, scheduleOrIntervalCode);
            // Interval mode bakes the frequency into coordinator.xml itself.
            if (Objects.equals(scheduleOrIntervalCode, ScheduleRepeatType.INTERVAL.getCode().toString())) {
                // Built-in frequency, written directly into the coordinator XML.
                hdfsUtil.writeFile(coordinatorUtil.creatCoordinatorXml(time, workMenuDepId, WorkFlowType.WORKFLOW_LAY.getCode()).replace("xmlns:", "").getBytes(), coordDst + "/coordinator.xml");
            } else {
                hdfsUtil.writeFile(coordinatorUtil.creatCoordinatorXml(workMenuDepId, WorkFlowType.WORKFLOW_LAY.getCode()).replace("xmlns:", "").getBytes(), coordDst + "/coordinator.xml");
                // Schedule mode: the EL expression goes into the frequency property.
                conf.setProperty("frequency", time);
            }
        }
        // Oozie-facing timestamps use a hard-coded +0800 layout.
        // NOTE(review): assumes the cluster/business timezone is GMT+8 — confirm.
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm+0800");
        simpleDateFormat.setTimeZone(TimeZone.getTimeZone("GMT+0800"));
        SimpleDateFormat simpleDateFormat1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        DateTime now = DateTime.now();
        conf.setProperty("workflowAppUri", "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + workFlowDst);
        // Delay before the first materialization; defaults to one minute.
        Long durTime = 1000*60L;
        if(!Objects.equals(2,cronType)) {
            if (Objects.equals(scheduleOrIntervalCode, ScheduleRepeatType.INTERVAL.getCode().toString())) {
                // In interval mode the delay equals one full interval.
                JSONObject jsonObject = JSONObject.parseObject(expression);
                if (Objects.equals(jsonObject.getString("type"), "hour")) {
                    Integer mins = Integer.valueOf(jsonObject.getString("minute"));
                    durTime = durTime * mins;
                }
                if (Objects.equals(jsonObject.getString("type"), "day")) {
                    Integer hours = Integer.valueOf(jsonObject.getString("hour"));
                    durTime = durTime * hours * 60;
                }
            }
        }
        DateTime startTime01 = new DateTime(now.getTime() + durTime);
        // Report the chosen start time back to the caller.
        objStart.put("startTime",simpleDateFormat1.format(startTime01));
        conf.setProperty("startTime", simpleDateFormat.format(startTime01));
        // Effectively "never ends": 99 years from now.
        conf.setProperty("endTime",simpleDateFormat.format(new DateTime(now.getTime() + 1000*60*60*24*365*99L)));
        conf.setProperty(OozieClient.COORDINATOR_APP_PATH, "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + coordDst);
        conf.setProperty("jobTracker","localhost:8032");
//        conf.setProperty("EXEC0","rest.sh");
//        conf.setProperty("time_hour",null);

        // A coordinator submission must not carry the plain workflow app path.
        conf.remove(OozieClient.APP_PATH);

        String oozieJobId = wc.run(conf);


        return oozieJobId;
    }

    /**
     * Schedules a job's workflow as an Oozie coordinator job: writes
     * coordinator.xml to HDFS, computes the frequency and the start/end time
     * window, and submits the configuration to Oozie.
     *
     * @param userId                 owner id, part of the HDFS paths
     * @param jobId                  job id, part of the HDFS paths
     * @param conf                   workflow configuration to extend with coordinator properties
     * @param expression             schedule expression (JSON in interval mode)
     * @param scheduleOrIntervalCode schedule mode code; must not be null unless cronType == 2
     * @param isDurTime              1 means the caller-supplied startTime/endTime window applies
     * @param startTime              optional window start, "yyyy-MM-dd HH:mm:ss"
     * @param endTime                optional window end, "yyyy-MM-dd HH:mm:ss"
     * @param isRetry                unused here — presumably consumed by the caller; verify
     * @param reTryMax               unused here — presumably consumed by the caller; verify
     * @param reTryInterval          unused here — presumably consumed by the caller; verify
     * @param objStartTime           out-param: receives the effective "startTime"
     * @param cronType               2 means a raw crontab expression is supplied via cronUrl
     * @param cronUrl                crontab expression used when cronType == 2
     * @return the Oozie coordinator job id
     * @throws OozieClientException when the submission fails
     */
    public String runCoordinatorJob(Long userId, Integer jobId, Properties conf, String expression, Integer scheduleOrIntervalCode,
                                    Integer isDurTime, String startTime, String endTime,
                                    Integer isRetry, Integer reTryMax, Integer reTryInterval, JSONObject objStartTime, Integer cronType, String cronUrl) throws OozieClientException {
        String coordDst = "coordinator-app/" + WorkFlowType.WORKFLOW.getCode() + "/" + userId.toString() + "/" + jobId.toString();
        String workFlowDst = "workflow-app/" + WorkFlowType.WORKFLOW.getCode() + "/" + userId.toString() + "/" + jobId.toString();
        CoordinatorUtil coordinatorUtil = new CoordinatorUtil();
        if (Objects.equals(2, cronType)) {
            // cronType 2: the caller supplies a raw crontab expression (cronUrl).
            hdfsUtil.writeFile(coordinatorUtil.creatCoordinatorXml(jobId, WorkFlowType.WORKFLOW.getCode()).replace("xmlns:", "").getBytes(), coordDst + "/coordinator.xml");
            conf.setProperty("frequency", cronUrl);
        } else {
            if (scheduleOrIntervalCode == null) {
                throw new RRException("调度方式为空！");
            }
            String time = coordinatorUtil.time2ELExpresion(expression, String.valueOf(scheduleOrIntervalCode));
            // Interval mode bakes the frequency into coordinator.xml itself.
            if (Objects.equals(scheduleOrIntervalCode, ScheduleRepeatType.INTERVAL.getCode())) {
                hdfsUtil.writeFile(coordinatorUtil.creatCoordinatorXml(time, jobId, WorkFlowType.WORKFLOW.getCode()).replace("xmlns:", "").getBytes(), coordDst + "/coordinator.xml");
            } else {
                hdfsUtil.writeFile(coordinatorUtil.creatCoordinatorXml(jobId, WorkFlowType.WORKFLOW.getCode()).replace("xmlns:", "").getBytes(), coordDst + "/coordinator.xml");
                // Schedule mode: the EL expression goes into the frequency property.
                conf.setProperty("frequency", time);
            }
        }
        SimpleDateFormat simpleDateFormat1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        // Oozie-facing timestamps use a hard-coded +0800 layout.
        // NOTE(review): assumes the cluster/business timezone is GMT+8 — confirm.
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm+0800");
        simpleDateFormat.setTimeZone(TimeZone.getTimeZone("GMT+0800"));
        conf.setProperty("workflowAppUri", "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + workFlowDst);
        DateTime now = DateTime.now();
        // Delay before the first materialization; defaults to one minute.
        Long durTime = 1000 * 60L;
        if (!Objects.equals(2, cronType)) {
            if (Objects.equals(scheduleOrIntervalCode, ScheduleRepeatType.INTERVAL.getCode())) {
                // In interval mode the delay equals one full interval.
                JSONObject jsonObject = JSONObject.parseObject(expression);
                if (Objects.equals(jsonObject.getString("type"), "hour")) {
                    Integer mins = Integer.valueOf(jsonObject.getString("minute"));
                    durTime = durTime * mins;
                }
                if (Objects.equals(jsonObject.getString("type"), "day")) {
                    Integer hours = Integer.valueOf(jsonObject.getString("hour"));
                    durTime = durTime * hours * 60;
                }
            }
        }
        // Default window: start one delay from now, end 99 years out.
        DateTime stDateTime = new DateTime(now.getTime() + durTime);
        String newStartTime = simpleDateFormat.format(stDateTime);
        String ssformat = simpleDateFormat1.format(stDateTime);
        String newEndTime = simpleDateFormat.format(new DateTime(now.getTime() + 1000 * 60 * 60 * 24 * 365 * 99L));
        if (Objects.equals(1, isDurTime)) {
            if (StringUtils.isNotEmpty(startTime)) {
                try {
                    Date parse = simpleDateFormat1.parse(startTime);
                    // Only a future start time may replace the computed default.
                    if (parse.after(stDateTime)) {
                        newStartTime = simpleDateFormat.format(parse);
                        ssformat = simpleDateFormat1.format(parse);
                    }
                } catch (ParseException e) {
                    logger.error("开始时间转换异常！error:" + e.getMessage(), e);
                }
            }
            if (StringUtils.isNotEmpty(endTime)) {
                try {
                    newEndTime = simpleDateFormat.format(simpleDateFormat1.parse(endTime));
                } catch (ParseException e) {
                    // Fixed: this handler used to log the *start*-time message.
                    logger.error("结束时间转换异常！error:" + e.getMessage(), e);
                }
            }
        }

        // Report the effective start time back to the caller.
        objStartTime.put("startTime", ssformat);
        conf.setProperty("startTime", newStartTime);
        conf.setProperty("endTime", newEndTime);
        conf.setProperty(OozieClient.COORDINATOR_APP_PATH, "hdfs://" + hdfsUtil.bdpJobConfig.getNamespace() + "/user/hdfs/" + coordDst);
        conf.setProperty("jobTracker", "localhost:8032");

        // A coordinator submission must not carry the plain workflow app path.
        conf.remove(OozieClient.APP_PATH);

        return wc.run(conf);
    }



}
