/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lvyh.lightframe.job.admin.job;

import com.alibaba.fastjson.JSON;
import com.google.common.collect.Maps;
import com.lvyh.lightframe.job.Constants;
import com.lvyh.lightframe.job.admin.bean.JobAlarmConfigBean;
import com.lvyh.lightframe.job.admin.bean.JobAlarmLogBean;
import com.lvyh.lightframe.job.admin.bean.JobTaskDefBean;
import com.lvyh.lightframe.job.admin.bean.JobTaskHistoryBean;
import com.lvyh.lightframe.job.admin.bean.JobTaskHistoryDetailBean;
import com.lvyh.lightframe.job.admin.manager.TaskNodeManager;
import com.lvyh.lightframe.job.admin.processor.JobManageProcessor;
import com.lvyh.lightframe.job.admin.schedule.ScheduleManager;
import com.lvyh.lightframe.job.admin.service.MessageNotifyService;
import com.lvyh.lightframe.job.admin.util.NetUtils;
import com.lvyh.lightframe.job.admin.util.SpringContextUtils;
import com.lvyh.lightframe.job.client.InvokeResult;
import com.lvyh.lightframe.job.enums.AlarmStatus;
import com.lvyh.lightframe.job.enums.AlarmType;
import com.lvyh.lightframe.job.enums.ScheduleStatus;
import com.lvyh.lightframe.job.enums.TaskMode;
import org.apache.commons.lang3.StringUtils;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * Abstract job
 */
/**
 * Base class for all scheduled jobs.
 *
 * <p>Wraps the subclass-provided {@link #executeInternal(JobExecutionContext)} with the
 * framework bookkeeping: temporary-schedule cleanup, retry accounting, task history
 * persistence, optional serial execution via a ZooKeeper lock node, and alarm/notification
 * on final failure.
 */
public abstract class AbstractJob implements Job {

    // Loggers are stateless and shared; declare once per class rather than per instance.
    private static final Logger logger = LoggerFactory.getLogger(AbstractJob.class);

    /**
     * Quartz entry point.
     *
     * <p>Declared {@code final} to prohibit subclass override and prevent logic errors in the
     * retry/locking/history bookkeeping; subclasses provide behavior via
     * {@link #executeInternal(JobExecutionContext)}.
     *
     * <p>Retry is implemented by throwing a {@link JobExecutionException} with
     * {@code refireImmediately=true} from the {@code finally} block; the retry sequence number
     * travels in the execution context under the key {@code "retrySeq"}.
     *
     * @param context Quartz execution context carrying the task definition in its JobDataMap
     * @throws JobExecutionException thrown deliberately to make Quartz refire the job when a
     *                               retry is still allowed
     */
    @Override
    public final void execute(JobExecutionContext context) throws JobExecutionException {

        InvokeResult executeResult = new InvokeResult();
        Integer retrySeq = this.parseRetrySeq(context);
        JobTaskDefBean taskDefBean = this.parseTaskConfigInfo(context);

        int taskId = taskDefBean.getId();
        Integer failRetryCount = taskDefBean.getExecutorFailRetryCount();
        Integer concurrent = taskDefBean.getConcurrentModel();
        Integer mode = taskDefBean.getExecuteMode();

        // A temporary ("run once") schedule is unregistered before execution.
        handleTempSchedule(mode, taskId);
        if (retrySeq.intValue() > failRetryCount.intValue()) {
            logger.info("taskId:{}, Retry task reached maximum:{}, Stop retry", taskId, failRetryCount);
            return;
        }

        boolean taskNodeCreated = false;
        boolean needRetry = true;
        Exception throwable = null;

        // History row is created once; on a refire the id is reused from the context.
        Integer historyId = createTaskHistoryInfo(context, taskDefBean, retrySeq);
        Integer historyDetailId = null;
        try {
            historyDetailId = createTaskHistoryDetailInfo(historyId, retrySeq, taskId);

            /**
             * Task concurrency, 1-concurrent, 0-serial
             */
            if (concurrent == 0) {
                // Serial mode: only run if we win the cluster-wide ZK lock node.
                taskNodeCreated = createTaskExecuteNode(historyId, historyDetailId, taskId);
                if (taskNodeCreated) {
                    executeResult = executeInternal(context);
                } else {
                    executeResult = new InvokeResult();
                    executeResult.setSuccess(false);
                    executeResult.setCode("LastNotDone");
                    executeResult.setDescription("The last job has not been completed, taskId=" + taskId);
                }
            } else {
                executeResult = executeInternal(context);
            }
            needRetry = !executeResult.isSuccess();
        } catch (Exception e) {
            if (e instanceof InterruptedException) {
                // Restore the interrupt flag swallowed by the broad catch.
                Thread.currentThread().interrupt();
            }
            throwable = e;
            needRetry = true; // Need to retry
            executeResult = new InvokeResult();
            executeResult.setSuccess(false);
            executeResult.setCode("Exception");
            executeResult.setDescription(e.getMessage());
            logger.error("EXEC-JOB-FAILED:{}, taskId={}", e.getMessage(), taskId, e);
        } finally {
            // NOTE: returning/throwing from finally masks any pending Error from the try block;
            // Exceptions are already captured above, and this is the intended refire mechanism.
            logger.info("DONE-taskId-{}, {}", taskId, executeResult.isSuccess() ? "success" : JSON.toJSONString(executeResult));
            if (needRetry && failRetryCount.intValue() <= retrySeq.intValue()) {
                needRetry = false; // Maximum number of retries reached
            }

            if (concurrent == 0) {
                removeTaskNode(taskNodeCreated, taskId);
            }
            updateJobTaskHistoryDetail(historyDetailId, executeResult);

            updateJobTaskHistory(needRetry, taskId, historyId, retrySeq, executeResult);

            if (!needRetry) {// Don't try again
                return;
            }

            // retry
            context.put("retrySeq", ++retrySeq);
            JobExecutionException exception = new JobExecutionException("needRetry-taskId-" + taskId + "-" + executeResult.getDescription(), throwable);
            //The job will refire immediately
            exception.setRefireImmediately(true);
            throw exception;
        }

    }

    /**
     * Updates the task-history row with the final status of this attempt and, when retries are
     * exhausted and the task still failed, records an alarm and notifies the user.
     * Best-effort: any persistence error is logged and swallowed so it cannot break the
     * retry flow driven by {@link #execute}.
     */
    private void updateJobTaskHistory(boolean needRetry, int taskId, int historyId, int retrySeq, InvokeResult executeResult) {
        try {
            JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);

            String status = executeResult.isSuccess() ? "success" : "failed";
            // Update task history status
            JobTaskHistoryBean history = new JobTaskHistoryBean();
            history.setId(historyId);
            history.setExecuteStatus(status);
            history.setExecutorFailRetryCount(retrySeq);
            // Last retry update task stop time
            if (!needRetry) history.setTaskEndDate(new Date());

            jobManageProcessor.updateJobTaskHistoryBean(history);

            if (!needRetry) {
                if (!executeResult.isSuccess()) {

                    // The result is failure, indicating that the retry is abandoned
                    saveJobAlarmLog(taskId, executeResult.getDescription());

                    // Send notifications and log events
                    notifyUser(taskId, executeResult.getDescription());
                }
            }
        } catch (Exception e) {
            // Swallow deliberately: history bookkeeping must not interfere with job control flow.
            logger.error("Update job task history failed, taskId={}, historyId={}", taskId, historyId, e);
        }

    }

    /**
     * Updates the per-attempt history-detail row with status, end time and a truncated copy of
     * the execution result. Best-effort: errors are logged, never propagated.
     */
    private void updateJobTaskHistoryDetail(int historyDetailId, InvokeResult executeResult) {
        try {
            JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);
            if (executeResult.getDescription() == null) executeResult.setDescription("");
            String status = executeResult.isSuccess() ? "success" : "failed";
            // Truncate to fit the DB columns (description 256, serialized result 1000).
            executeResult.setDescription(StringUtils.substring(executeResult.getDescription(), 0, 256));

            JobTaskHistoryDetailBean historyDetail = new JobTaskHistoryDetailBean();
            historyDetail.setId(historyDetailId);
            historyDetail.setStatus(status);
            historyDetail.setTaskEndTime(new Date());
            historyDetail.setExecuteResult(StringUtils.substring(JSON.toJSONString(executeResult), 0, 1000));
            historyDetail.setExecuteResultDesc(executeResult.isSuccess() ? ScheduleStatus.SUCCESS.getDesc() : ScheduleStatus.FAILED.getDesc());

            jobManageProcessor.updateJobTaskHistoryDetailBean(historyDetail);
        } catch (Exception e) {
            // Swallow deliberately: detail bookkeeping must not interfere with job control flow.
            logger.error("Update job task history detail failed, historyDetailId={}", historyDetailId, e);
        }
    }

    /**
     * Returns the history-row id for this execution, creating the row on the first attempt and
     * reusing the id stored in the context on refires. Best-effort: on a persistence error the
     * id may be {@code null}, which downstream updates tolerate via their own try/catch.
     */
    private Integer createTaskHistoryInfo(JobExecutionContext context, JobTaskDefBean taskDefBean, Integer retrySeq) {
        Integer historyId = (Integer) context.get("historyId");
        try {
            if (historyId == null) {
                historyId = saveTaskHistoryInfo(taskDefBean, retrySeq);
                context.put("historyId", historyId);
            } else {
                logger.info("Retry task {} time:taskId:{}", retrySeq, taskDefBean.getId());
            }
        } catch (Exception e) {
            logger.error("Save job task history failed, taskId={}", taskDefBean.getId(), e);
        }
        return historyId;
    }

    /**
     * Inserts a new history-detail row (status "executing") for this attempt.
     *
     * @return the generated detail-row id
     * @throws Exception if persistence fails; the caller's catch turns this into a retry
     */
    private Integer createTaskHistoryDetailInfo(int historyId, int retrySeq, int taskId) throws Exception {
        JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);
        JobTaskHistoryDetailBean historyDetail = new JobTaskHistoryDetailBean();
        historyDetail.setHistoryId(historyId);
        historyDetail.setRetrySeq(retrySeq);
        historyDetail.setStatus("executing");
        historyDetail.setTaskBeginTime(new Date());
        historyDetail.setTaskId(taskId);
        return jobManageProcessor.saveJobTaskHistoryDetailBean(historyDetail);
    }

    /**
     * Attempts to acquire the cluster-wide serial-execution lock for this task.
     *
     * @return {@code true} if the ZK node was created (lock acquired), {@code false} if another
     *         node already holds it
     */
    private boolean createTaskExecuteNode(int historyId, int historyDetailId, int taskId) throws Exception {
        TaskNodeManager taskNodeManager = SpringContextUtils.getBean(TaskNodeManager.class);
        /**
         * Create ZK task temporary node.
         * If it is created successfully, it indicates that the unique execution locking is successful. Then execute scheduling.
         * If the creation return node already exists, it is proved that another node has already created the node,
         * then the node will no longer execute.
         */
        Map<String, String> zkNodeData = Maps.newHashMap();
        zkNodeData.put("task_id", String.valueOf(taskId));
        zkNodeData.put("history_id", String.valueOf(historyId));
        zkNodeData.put("history_detail_id", String.valueOf(historyDetailId));

        return taskNodeManager.createTaskNode(taskId, zkNodeData);
    }

    /**
     * Reads the retry sequence number from the context; 0 on the first (non-refire) attempt.
     */
    private int parseRetrySeq(JobExecutionContext context) {
        Integer retrySeq = (Integer) context.get("retrySeq");
        if (retrySeq == null) retrySeq = 0;
        return retrySeq;
    }

    /**
     * If this execution is a temporary (one-shot) schedule, removes the schedule so it does not
     * fire again.
     */
    private void handleTempSchedule(Integer mode, int taskId) {
        ScheduleManager scheduleManager = SpringContextUtils.getBean(ScheduleManager.class);
        // If it is temporary scheduling, cancel task scheduling
        if (mode != null && mode.intValue() == TaskMode.TMP.getMode()) {
            scheduleManager.deleteTmpSchedule(taskId);
        }
    }

    /**
     * Builds a {@link JobTaskDefBean} from the JobDataMap attached to the Quartz job detail.
     * Missing optional values get safe defaults so later unboxing cannot NPE.
     */
    private JobTaskDefBean parseTaskConfigInfo(JobExecutionContext context) {
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();

        /**
         * If the mode is empty, it is the default scheduling.
         * If the mode is 1, it is temporary execution. After execution, the scheduling is cancelled
         */
        Integer mode = (Integer) dataMap.get("mode");
        int taskId = (int) dataMap.get("taskId");
        Integer failRetryCount = (Integer) dataMap.get("retries");
        if (failRetryCount == null) failRetryCount = 0;
        Integer concurrent = (Integer) dataMap.get("concurrent");
        // NOTE(review): a missing "concurrent" previously caused an NPE when execute() unboxed
        // it via `concurrent == 0`. Default to 1 (concurrent, no ZK lock) — confirm this is the
        // intended default for tasks scheduled without the flag.
        if (concurrent == null) concurrent = 1;

        JobTaskDefBean jobTaskDefBean = new JobTaskDefBean();
        jobTaskDefBean.setExecutorFailRetryCount(failRetryCount);
        jobTaskDefBean.setId(taskId);
        jobTaskDefBean.setConcurrentModel(concurrent);
        jobTaskDefBean.setExecuteMode(mode);
        jobTaskDefBean.setTaskMetaInfo(JSON.toJSONString(dataMap));

        return jobTaskDefBean;
    }

    /**
     * Inserts the top-level history row for this execution (status "executing").
     *
     * @return the generated history-row id
     * @throws Exception if persistence fails
     */
    private Integer saveTaskHistoryInfo(JobTaskDefBean taskDefBean, int retrySeq) throws Exception {
        JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);
        JobTaskHistoryBean history = new JobTaskHistoryBean();
        history.setMetaDataInfo(taskDefBean.getTaskMetaInfo());
        history.setTaskId(taskDefBean.getId());
        history.setScheduleTime(new Date());
        history.setTaskStartDate(new Date());
        history.setExecuteStatus("executing");
        history.setExecutorFailRetryCount(retrySeq);
        history.setScheduleNode(NetUtils.getHostAddress());
        history.setOperator("system");
        history.setScheduleType(taskDefBean.getExecuteMode());
        return jobManageProcessor.saveJobTaskHistoryBean(history);
    }

    /**
     * Persists a TASK_EXECUTE_EXCEPTION alarm-log entry for a task whose retries are exhausted.
     *
     * @param taskId id of the failed task
     * @param errMsg description of the final failure
     * @throws Exception if persistence fails
     */
    protected void saveJobAlarmLog(Integer taskId, String errMsg) throws Exception {
        JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);
        JobAlarmLogBean alarmLog = new JobAlarmLogBean();
        alarmLog.setTaskId(taskId);
        alarmLog.setAlarmTime(new Date());
        alarmLog.setAlarmType(AlarmType.TASK_EXECUTE_EXCEPTION.getKey());
        alarmLog.setAlarmName(AlarmType.TASK_EXECUTE_EXCEPTION.getValue());
        alarmLog.setAlarmDetail(AlarmType.TASK_EXECUTE_EXCEPTION.getDesc() + ",taskId:" + taskId + ",error:" + errMsg);

        jobManageProcessor.saveJobAlarmLogBean(alarmLog);
    }

    /**
     * Releases the serial-execution ZK node if this node created it. On delete failure the node
     * is handed to a background sweeper and a ZK_TASK_NODE_MISS_DEL alarm is recorded.
     *
     * @param taskNodeCreated whether this execution acquired the lock (created the node)
     * @param taskId          id of the task whose node should be removed
     */
    protected void removeTaskNode(boolean taskNodeCreated, Integer taskId) {

        TaskNodeManager taskNodeManager = SpringContextUtils.getBean(TaskNodeManager.class);
        try {

            if (taskNodeCreated) {
                taskNodeManager.removeTaskNode(taskId);
            }
        } catch (Exception e) {

            // The undeleted temporary nodes are regularly scanned and cleared by the background thread
            taskNodeManager.scheduleDelete(taskId);
            logger.error(e.getMessage(), e);

            JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);
            JobAlarmLogBean alarmLog = new JobAlarmLogBean();
            alarmLog.setTaskId(taskId);
            alarmLog.setAlarmTime(new Date());
            alarmLog.setAlarmType(AlarmType.ZK_TASK_NODE_MISS_DEL.getKey());
            alarmLog.setAlarmName(AlarmType.ZK_TASK_NODE_MISS_DEL.getValue());
            alarmLog.setAlarmDetail(AlarmType.ZK_TASK_NODE_MISS_DEL.getDesc() + ",taskId:" + taskId);
            try {
                jobManageProcessor.saveJobAlarmLogBean(alarmLog);
            } catch (Exception ex) {
                // Alarm persistence is best-effort; the sweeper above is the real recovery path.
                logger.error("Save ZK_TASK_NODE_MISS_DEL alarm failed, taskId={}", taskId, ex);
            }
        }
    }

    /**
     * Sends an alarm notification for a task that has exhausted its retries, if an enabled
     * alarm configuration exists for it. Best-effort: send failures are logged and swallowed.
     *
     * @param taskId id of the failed task
     * @param errMsg description of the final failure, included in the message
     */
    protected void notifyUser(Integer taskId, String errMsg) {

        try {
            JobManageProcessor jobManageProcessor = SpringContextUtils.getBean(JobManageProcessor.class);
            MessageNotifyService messageNotifyService = SpringContextUtils.getBean(MessageNotifyService.class);
            JobAlarmConfigBean alarmConfigBean = jobManageProcessor.getJobAlarmConfigInfo(taskId);
            JobTaskDefBean task = jobManageProcessor.getJobTaskDefById(taskId);

            // NOTE(review): if getStatus() returns a boxed Integer, `!=` compares references and
            // may misbehave outside the integer cache — confirm the declared type.
            if (alarmConfigBean == null || alarmConfigBean.getStatus() != AlarmStatus.ENABLED.getStatus()) {
                return;
            }

            HashMap<String, String> param = new HashMap<String, String>();
            param.put("task_id", taskId.toString());
            param.put("task_name", task.getDisplayName());
            param.put("alarm_content", "The maximum number of retries has been reached,errMsg:" + errMsg);
            messageNotifyService.sendMessage(alarmConfigBean, param, Constants.EVENT_KEY_SCHEDULE_ALARM);
        } catch (Exception e) {
            // Log with the full stack trace so send failures are diagnosable.
            logger.error("Task alarm message send failed(taskId={}):{}", taskId, e.getMessage(), e);
        }

    }

    /**
     * Implementation provided by subclasses
     */
    protected abstract InvokeResult executeInternal(JobExecutionContext context) throws InterruptedException, IOException, URISyntaxException;

}
