package com.ruicar.afs.jobadmin.controller;

import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruicar.afs.cloud.common.core.util.IResponse;
import com.ruicar.afs.cloud.common.job.core.biz.ExecutorBiz;
import com.ruicar.afs.cloud.common.job.core.biz.model.ReturnT;
import com.ruicar.afs.jobadmin.core.model.AfsJobInfo;
import com.ruicar.afs.jobadmin.core.model.AfsJobLog;
import com.ruicar.afs.jobadmin.core.scheduler.AfsJobScheduler;
import com.ruicar.afs.jobadmin.mapper.AfsJobInfoMapper;
import com.ruicar.afs.jobadmin.core.util.I18nUtil;
import com.ruicar.afs.jobadmin.mapper.AfsJobLogMapper;
import lombok.AllArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;

import java.util.Date;
import java.util.List;

/**
 * REST controller exposing job-log endpoints: paged log listing with status
 * and time-range filters, terminating a running job by log id, and batch
 * cleanup of historical log records.
 *
 * @author Fzero
 * @version 1.0
 * @date Created on 2020.06.13 14:17:13
 * @since 1.0
 */
@RestController
@RequestMapping("/joblog")
@AllArgsConstructor
public class JobLogController {
    /**
     * Logger for this controller.
     */
    private static final Logger logger = LoggerFactory.getLogger(JobLogController.class);
    /**
     * Mapper for job definitions; injected via the Lombok-generated constructor.
     */
    private final AfsJobInfoMapper afsJobInfoMapper;
    /**
     * Mapper for job execution logs; injected via the Lombok-generated constructor.
     */
    private final AfsJobLogMapper afsJobLogMapper;

    /**
     * Queries a page of job logs, optionally filtered by group, job, execution
     * status and trigger-time range, newest first.
     *
     * <p>Status semantics: 1 = handled successfully, 2 = trigger or handle
     * failed, 3 = triggered successfully but result still pending.</p>
     *
     * @param pageNum          page number (0-based as passed to MyBatis-Plus {@code Page})
     * @param pageSize         page size
     * @param jobGroup         job group id; -1 (default) means no group filter
     * @param jobId            job id; values &lt;= 0 mean no job filter
     * @param logStatus        status filter as described above; -1 means no filter
     * @param triggerTimeStart inclusive lower bound, "yyyy-MM-dd HH:mm:ss"; blank/null means unbounded
     * @param triggerTimeEnd   inclusive upper bound, "yyyy-MM-dd HH:mm:ss"; blank/null means unbounded
     * @return the matching page of {@link AfsJobLog} wrapped in an {@link IResponse}
     */
    @PostMapping("/pageList")
    public IResponse pageList(
                              @RequestParam(required = false, defaultValue = "0") int pageNum,
                              @RequestParam(required = false, defaultValue = "10") int pageSize,
                              @RequestParam(required = false, defaultValue = "-1") int jobGroup,
                              @RequestParam(required = false, defaultValue = "-1") int jobId,
                              @RequestParam(required = false, defaultValue = "-1") int logStatus,
                              @RequestParam(required = false) String triggerTimeStart,
                              @RequestParam(required = false) String triggerTimeEnd) {

        // Parse each bound once; null means "no bound" and the matching wrapper
        // condition is false, so a null value is never written into the query.
        Date timeStart = StringUtils.isNotEmpty(triggerTimeStart)
                ? DateUtil.parse(triggerTimeStart, DatePattern.NORM_DATETIME_FORMAT)
                : null;
        Date timeEnd = StringUtils.isNotEmpty(triggerTimeEnd)
                ? DateUtil.parse(triggerTimeEnd, DatePattern.NORM_DATETIME_FORMAT)
                : null;

        return IResponse.success(
                afsJobLogMapper.selectPage(
                        new Page<>(pageNum, pageSize),
                        Wrappers.<AfsJobLog>lambdaQuery()
                                .eq(jobGroup > -1, AfsJobLog::getJobGroup, jobGroup)
                                .eq(jobId > 0, AfsJobLog::getJobId, jobId)
                                // 1: handled successfully
                                .eq(logStatus == 1, AfsJobLog::getHandleCode, 200)
                                // 2: either the trigger or the handler reported failure
                                .and(logStatus == 2, i -> i.notIn(AfsJobLog::getHandleCode, 0, 200)
                                        .or()
                                        .notIn(AfsJobLog::getTriggerCode, 0, 200))
                                // 3: triggered successfully, handler result still pending
                                .eq(logStatus == 3, AfsJobLog::getHandleCode, 0)
                                .eq(logStatus == 3, AfsJobLog::getTriggerCode, 200)
                                .ge(timeStart != null, AfsJobLog::getTriggerTime, timeStart)
                                .le(timeEnd != null, AfsJobLog::getTriggerTime, timeEnd)
                                .orderByDesc(AfsJobLog::getTriggerTime)
                ));
    }


    /**
     * Asks the executor that ran a job to kill it, identified by log id.
     *
     * <p>Only logs whose trigger succeeded can be killed. On a successful kill
     * the log entry is marked as manually terminated.</p>
     *
     * @param id job log id
     * @return success with the executor's message, or a failure response when
     *         the log/job cannot be found, the log is not killable, or the
     *         executor reports an error
     */
    @PostMapping("/logKill")
    public IResponse<String> logKill(int id) {
        // base check: the log entry and its job definition must both exist.
        // load(id) may return null for an unknown id — guard before dereferencing.
        AfsJobLog log = afsJobLogMapper.load(id);
        if (log == null) {
            return IResponse.fail(I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
        }
        AfsJobInfo jobInfo = afsJobInfoMapper.loadById(log.getJobId());
        if (jobInfo == null) {
            return IResponse.fail(I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
        }
        if (ReturnT.SUCCESS_CODE != log.getTriggerCode()) {
            return IResponse.fail(I18nUtil.getString("joblog_kill_log_limit"));
        }

        // request the executor that handled this trigger to kill the job
        ReturnT<String> runResult;
        try {
            ExecutorBiz executorBiz = AfsJobScheduler.getExecutorBiz(log.getExecutorAddress());
            runResult = executorBiz.kill(jobInfo.getId());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            runResult = new ReturnT<String>(500, e.getMessage());
        }

        if (ReturnT.SUCCESS_CODE == runResult.getCode()) {
            // record the manual termination on the log entry
            log.setHandleCode(ReturnT.FAIL_CODE);
            log.setHandleMsg(I18nUtil.getString("joblog_kill_log_byman") + ":" + (runResult.getMsg() != null ? runResult.getMsg() : ""));
            log.setHandleTime(new Date());
            afsJobLogMapper.updateHandleInfo(log);
            return IResponse.success(runResult.getMsg());
        } else {
            return IResponse.fail(runResult.getMsg());
        }
    }

    /**
     * Clears historical job logs for a group/job by age or by count.
     *
     * <p>Types 1-4 keep only logs newer than 1/3/6/12 months; types 5-8 keep
     * only the newest 1k/10k/30k/100k entries; type 9 clears everything.</p>
     *
     * @param jobGroup job group id
     * @param jobId    job id
     * @param type     cleanup strategy, 1-9 as described above
     * @return success with an empty payload, or a failure response for an
     *         unknown {@code type}
     */
    @PostMapping("/clearLog")
    public IResponse<String> clearLog(@RequestParam int jobGroup, @RequestParam int jobId, @RequestParam int type) {

        Date clearBeforeTime = null;
        int clearBeforeNum = 0;
        switch (type) {
            case 1:
                clearBeforeTime = DateUtil.lastMonth();                     // logs older than one month
                break;
            case 2:
                clearBeforeTime = DateUtil.offsetMonth(new Date(), -3);     // logs older than three months
                break;
            case 3:
                clearBeforeTime = DateUtil.offsetMonth(new Date(), -6);     // logs older than six months
                break;
            case 4:
                clearBeforeTime = DateUtil.offsetMonth(new Date(), -12);    // logs older than one year
                break;
            case 5:
                clearBeforeNum = 1000;      // keep only the newest 1,000 entries
                break;
            case 6:
                clearBeforeNum = 10000;     // keep only the newest 10,000 entries
                break;
            case 7:
                clearBeforeNum = 30000;     // keep only the newest 30,000 entries
                break;
            case 8:
                clearBeforeNum = 100000;    // keep only the newest 100,000 entries
                break;
            case 9:
                clearBeforeNum = 0;         // clear all log entries
                break;
            default:
                return IResponse.fail(I18nUtil.getString("joblog_clean_type_unvalid"));
        }

        // Delete in batches of 1000 to keep each IN-clause and transaction small.
        List<Long> logIds;
        do {
            logIds = afsJobLogMapper.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
            if (logIds != null && !logIds.isEmpty()) {
                afsJobLogMapper.clearLog(logIds);
            }
        } while (logIds != null && !logIds.isEmpty());

        return IResponse.success("");
    }

}
