package avicit.bdp.dds.service.quartz;


import avicit.bdp.common.utils.DateUtils;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.dds.dispatch.enums.CommandType;
import avicit.bdp.dds.dispatch.enums.ReleaseState;
import avicit.bdp.dds.dispatch.enums.TaskDependType;
import avicit.bdp.dds.dispatch.process.Property;
import avicit.bdp.dds.common.Constants;
import avicit.bdp.dds.dao.entity.Command;
import avicit.bdp.dds.dao.entity.ProcessDefinition;
import avicit.bdp.dds.dao.entity.Schedule;
import avicit.bdp.dds.service.process.ProcessService;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson2.TypeReference;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;

import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Quartz {@link Job} that fires for a process schedule trigger and translates the
 * firing into a {@link Command} record for the dispatch service to consume.
 *
 * <p>Quartz instantiates this class reflectively, so dependencies cannot be
 * injected; the {@link ProcessService} bean is resolved from the Spring context
 * on demand instead.
 */
public class ProcessScheduleJob implements Job {

    /**
     * logger of ProcessScheduleJob
     */
    private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleJob.class);

    /** Extra-param property name holding the incremental-sync end time. */
    private static final String DATA_END_TIME = "DATA_END_TIME";

    /**
     * Resolves the {@link ProcessService} bean from the Spring application context.
     *
     * @return the shared ProcessService bean
     */
    public ProcessService getProcessService() {
        return SpringApplicationContext.getBean(ProcessService.class);
    }

    /**
     * Called by the Scheduler when a Trigger fires that is associated with the Job.
     *
     * <p>Loads the schedule and its process definition, refuses to act on missing
     * or offline definitions, keeps the incremental-sync end-time extra param in
     * step with the fire time, and finally persists a SCHEDULER command.
     *
     * @param context JobExecutionContext
     * @throws JobExecutionException if there is an exception while executing the job.
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // Resolve the bean once instead of one context lookup per use.
        ProcessService processService = getProcessService();
        Assert.notNull(processService, "please call init() method first");

        JobDataMap dataMap = context.getJobDetail().getJobDataMap();

        String projectId = dataMap.getString(Constants.PROJECT_ID);
        String scheduleId = dataMap.getString(Constants.SCHEDULE_ID);

        Date scheduledFireTime = context.getScheduledFireTime();

        Date fireTime = context.getFireTime();

        logger.info("scheduled fire time :{}, fire time :{}, process id :{}", scheduledFireTime, fireTime, scheduleId);

        // A missing schedule row means it was deleted from the db behind our back;
        // the orphaned quartz trigger must be removed or it would keep firing forever.
        Schedule schedule = processService.querySchedule(scheduleId);
        if (schedule == null) {
            logger.warn("process schedule does not exist in db, delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId);
            deleteJob(projectId, scheduleId);
            return;
        }

        ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId());
        if (processDefinition == null) {
            logger.warn("process definition does not exist in db, no need to create command, projectId:{}, scheduleId:{}", projectId, scheduleId);
            return;
        }

        // release state : online/offline — only ONLINE definitions may be scheduled.
        ReleaseState releaseState = ReleaseState.of(processDefinition.getReleaseState());
        if (releaseState == ReleaseState.OFFLINE) {
            // BUGFIX: message previously read "does not offline" although this branch
            // is reached exactly when the definition IS offline.
            logger.warn("process definition is offline, no need to create command, projectId:{}, scheduleId:{}", projectId, scheduleId);
            return;
        }

        // TODO check whether the previous sync task has finished

        refreshDataEndTime(processService, processDefinition);

        processService.createCommand(buildCommand(schedule, scheduledFireTime, fireTime));
    }

    /**
     * Keeps the incremental-sync window aligned with the scheduler: when the process
     * definition carries extra params with a non-empty {@code DATA_END_TIME} value,
     * that value is advanced to the current system time and the updated params are
     * persisted. The previous end time becomes the next query's start, yielding a
     * half-open interval [previous end, new end) for incremental WHERE clauses.
     *
     * @param processService    service used to persist the updated extra params
     * @param processDefinition definition whose extra params may be refreshed
     */
    private void refreshDataEndTime(ProcessService processService, ProcessDefinition processDefinition) {
        if (StringUtils.isEmpty(processDefinition.getExtraParams())) {
            return;
        }
        List<Property> properties = JSONObject.parseObject(processDefinition.getExtraParams(),
                new TypeReference<List<Property>>() { });
        Map<String, Property> extraParamsMap = parseListToMap(properties);
        // The end-time param is only present when it was set at definition-creation time.
        Property endProperty = extraParamsMap.get(DATA_END_TIME);
        if (endProperty == null || StringUtils.isEmpty(endProperty.getValue())) {
            return;
        }
        // Mutating the Property in place also updates the map entry; no re-put needed.
        endProperty.setValue(DateUtils.format(new Date(), avicit.bdp.core.constant.Constants.YYYY_MM_DD_HH_MM_SS));
        Collection<Property> values = extraParamsMap.values();
        // Persist the refreshed extra params on the process definition.
        processService.updateProcessDefinitionExtraParams(processDefinition.getId(), JSON.toJSONString(values));
        logger.info("schedule start update process definition: {}, extraParams: {}",
                processDefinition.getId(), extraParamsMap);
    }

    /**
     * Builds the SCHEDULER command describing this firing: copies the schedule's
     * execution settings and stamps the scheduled/actual fire times.
     *
     * @param schedule          schedule the trigger belongs to
     * @param scheduledFireTime time the trigger was supposed to fire
     * @param fireTime          time the trigger actually fired
     * @return a fully-populated, not-yet-persisted command
     */
    private Command buildCommand(Schedule schedule, Date scheduledFireTime, Date fireTime) {
        Command command = new Command();
        command.setCommandType(CommandType.SCHEDULER.getCode());
        command.setExecutorId(schedule.getCreatedBy());
        command.setFailureStrategy(schedule.getFailureStrategy());
        command.setProcessDefinitionId(schedule.getProcessDefinitionId());
        command.setScheduleTime(scheduledFireTime);
        command.setStartTime(fireTime);
        command.setWarningUserIds(schedule.getWarningUserIds());
        // Fall back to the default worker group when the schedule does not name one.
        String workerGroup = StringUtils.isEmpty(schedule.getWorkerGroup())
                ? Constants.DEFAULT_WORKER_GROUP : schedule.getWorkerGroup();
        command.setWorkerGroup(workerGroup);
        command.setWarningType(schedule.getWarningType());
        command.setProcessInstancePriority(schedule.getProcessInstancePriority());

        Map<String, String> cmdParam = new HashMap<>(16);
        command.setCommandParam(JSON.toJSONString(cmdParam));
        command.setTaskDependType(TaskDependType.TASK_POST.getCode());
        return command;
    }

    /**
     * Removes the quartz job registered for the given schedule, used when the
     * backing schedule row no longer exists.
     *
     * @param projectId  id of the project the job group is named after
     * @param scheduleId id of the schedule the job is named after
     */
    private void deleteJob(String projectId, String scheduleId) {
        String jobName = QuartzExecutors.buildJobName(scheduleId);
        String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);
        QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName);
    }

    /**
     * Indexes the given properties by their {@code prop} name, preserving
     * insertion order. Later duplicates overwrite earlier ones.
     *
     * @param list properties to index; may be null or empty
     * @return a mutable, insertion-ordered map — empty (never null) for empty input
     */
    private Map<String, Property> parseListToMap(List<Property> list) {
        Map<String, Property> map = new LinkedHashMap<>();
        if (CollectionUtils.isNotEmpty(list)) {
            for (Property property : list) {
                map.put(property.getProp(), property);
            }
        }
        return map;
    }

}
