package com.ruoyi.system.service.impl;

import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.ruoyi.common.core.exception.ServiceException;
import com.ruoyi.common.core.utils.file.FileUtils;
import com.ruoyi.common.mybatis.core.page.PageQuery;
import com.ruoyi.common.mybatis.core.page.TableDataInfo;
import com.ruoyi.common.oss.core.OssClient;
import com.ruoyi.common.redis.utils.RedisUtils;
import com.ruoyi.system.domain.ArchiveLocation;
import com.ruoyi.system.domain.ArchiveResult;
import com.ruoyi.system.domain.ArchiveStrategy;
import com.ruoyi.system.domain.ArchiveTask;
import com.ruoyi.system.domain.bo.ArchiveTaskBo;
import com.ruoyi.system.domain.vo.ArchiveTaskVo;
import com.ruoyi.system.enums.*;
import com.ruoyi.system.mapper.ArchiveLocationMapper;
import com.ruoyi.system.mapper.ArchiveResultMapper;
import com.ruoyi.system.mapper.ArchiveStrategyMapper;
import com.ruoyi.system.mapper.ArchiveTaskMapper;
import com.ruoyi.system.mapper.job.XxlJobInfoDao;
import com.ruoyi.system.service.IArchiveTaskService;
import com.ruoyi.system.util.ArchiveUtil;
import com.ruoyi.system.util.Constants;
import com.ruoyi.system.util.NetUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.*;
import java.time.Duration;
import java.util.Date;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * 归档任务Service业务层处理
 *
 * @author ruoyi
 * @date 2024-12-06
 */
@RequiredArgsConstructor
@Service
@Slf4j
public class ArchiveTaskServiceImpl implements IArchiveTaskService {

    /**
     * Base URL of the xxl-job admin REST endpoints ({@code /jobinfo/<operate>}).
     * TODO(review): externalize to configuration — a hard-coded IP breaks in any other environment.
     */
    private static final String XXL_JOB_ADMIN_URL = "http://116.239.32.48:9900/jobinfo/";

    private final ArchiveTaskMapper taskMapper;

    private final ArchiveStrategyMapper strategyMapper;

    private final ArchiveResultMapper resultMapper;

    private final ArchiveLocationMapper locationMapper;

    // NOTE(review): not referenced anywhere in this class; kept because removing it
    // changes the Lombok-generated constructor signature.
    private final XxlJobInfoDao jobInfoDao;

    /**
     * Queries a single archive task by primary key.
     *
     * @param id task id
     * @return the task VO, or {@code null} if not found
     */
    @Override
    public ArchiveTaskVo queryById(Long id) {
        return taskMapper.selectVoById(id);
    }

    /**
     * Paged query of archive tasks; each row is enriched with its strategy name.
     *
     * @param bo        filter conditions
     * @param pageQuery paging parameters
     * @return one page of task VOs
     */
    @Override
    public TableDataInfo<ArchiveTaskVo> queryPageList(ArchiveTaskBo bo, PageQuery pageQuery) {
        LambdaQueryWrapper<ArchiveTask> lqw = buildQueryWrapper(bo);
        Page<ArchiveTaskVo> result = taskMapper.selectVoPage(pageQuery.build(), lqw);
        if (CollectionUtils.isNotEmpty(result.getRecords())) {
            List<Long> strategyIds = result.getRecords().stream()
                .map(ArchiveTaskVo::getStrategyId)
                .collect(Collectors.toList());
            // Batch-load strategy names once to avoid a query per row.
            Map<Long, String> strategyNameMap = strategyMapper.selectBatchIds(strategyIds).stream()
                .collect(Collectors.toMap(ArchiveStrategy::getId, ArchiveStrategy::getName));
            for (ArchiveTaskVo task : result.getRecords()) {
                task.setStrategyName(strategyNameMap.get(task.getStrategyId()));
            }
        }
        return TableDataInfo.build(result);
    }

    /**
     * Queries the full (unpaged) list of archive tasks matching the filter.
     *
     * @param bo filter conditions
     * @return matching task VOs
     */
    @Override
    public List<ArchiveTaskVo> queryList(ArchiveTaskBo bo) {
        LambdaQueryWrapper<ArchiveTask> lqw = buildQueryWrapper(bo);
        return taskMapper.selectVoList(lqw);
    }

    /**
     * Builds the common query wrapper from the optional filter fields of the BO.
     * Each condition is only applied when its filter value is non-null.
     */
    private LambdaQueryWrapper<ArchiveTask> buildQueryWrapper(ArchiveTaskBo bo) {
        LambdaQueryWrapper<ArchiveTask> lqw = Wrappers.lambdaQuery();
        lqw.eq(bo.getBackType() != null, ArchiveTask::getBackType, bo.getBackType());
        lqw.eq(bo.getStrategyId() != null, ArchiveTask::getStrategyId, bo.getStrategyId());
        lqw.eq(bo.getCycle() != null, ArchiveTask::getCycle, bo.getCycle());
        lqw.eq(bo.getPubStatus() != null, ArchiveTask::getPubStatus, bo.getPubStatus());
        return lqw;
    }

    /**
     * Creates a new archive task: registers a scheduled job in xxl-job first,
     * then persists the task row carrying the returned xxl-job id.
     *
     * @param bo task to create; its id is assigned here and visible to the caller
     * @return {@code true} if the row was inserted
     */
    @Override
    public Boolean insertByBo(ArchiveTaskBo bo) {
        bo.setPubStatus(PubStatusEnum.NOT_PUBLISH.getValue());
        // Pre-generate the id so it can be passed to xxl-job as the executor parameter.
        Long id = IdWorker.getId();
        bo.setId(id);

        ArchiveTask add = BeanUtil.toBean(bo, ArchiveTask.class);
        // NOTE(review): the remote job is created before the local insert; if the
        // insert below fails, the xxl-job entry is orphaned. TODO: compensate on failure.
        add.setXxlJobId(operateXxlJob(null, bo, "add"));
        validEntityBeforeSave(add);
        return taskMapper.insert(add) > 0;
    }

    /**
     * Builds the form parameters for the xxl-job admin API from a task BO.
     * Returns an empty map when {@code bo} is null (used by start/stop/remove calls).
     */
    private Map<String, Object> getXxlJobParams(ArchiveTaskBo bo) {
        Map<String, Object> params = new HashMap<>();
        if (bo == null) {
            return params;
        }
        // Compute the cron expression once; it is sent under two keys.
        String cron = getArchiveJobCron(bo);
        params.put("jobGroup", 1);
        params.put("jobDesc", bo.getName());
        params.put("author", "admin");
        params.put("scheduleType", "CRON");
        params.put("scheduleConf", cron);
        params.put("cronGen_display", cron);
        params.put("glueType", "BEAN");
        params.put("executorHandler", "archiveJobHandler");
        // The task id is the executor parameter: the handler calls execute(id) with it.
        params.put("executorParam", bo.getId());
        params.put("executorRouteStrategy", "FIRST");
        params.put("misfireStrategy", "DO_NOTHING");
        params.put("executorBlockStrategy", "SERIAL_EXECUTION");
        params.put("executorTimeout", 0);
        params.put("executorFailRetryCount", 0);
        params.put("glueRemark", "GLUE代码初始化");

        return params;
    }

    /**
     * Calls the xxl-job admin API ({@code add}/{@code update}/{@code start}/{@code stop}/{@code remove}).
     *
     * @param id      remote job id; required for every operation except {@code add}
     * @param bo      task data; only needed for {@code add}/{@code update}
     * @param operate operation name appended to the admin URL
     * @return the new job id for {@code add}, otherwise 0
     * @throws ServiceException when the call fails or the admin returns a non-200 code
     */
    public long operateXxlJob(Long id, ArchiveTaskBo bo, String operate) {
        String url = XXL_JOB_ADMIN_URL + operate;
        Map<String, Object> params = getXxlJobParams(bo);
        if (id != null) {
            params.put("id", id);
        }
        String result = HttpUtil.createPost(url).header("from", "zhjx").form(params).timeout(-1).execute().body();
        if (StringUtils.isBlank(result)) {
            throw new ServiceException("操作失败，请重试");
        }
        JSONObject object = JSONObject.parseObject(result);
        if (200 != object.getIntValue("code")) {
            throw new ServiceException("操作失败，请重试:" + object.getString("msg"));
        }
        if ("add".equals(operate)) {
            // For "add" the admin returns the new job id in "content"; 0 means failure.
            if (0 == object.getIntValue("content")) {
                throw new ServiceException("操作失败，请重试:" + object.getString("msg"));
            }
            return object.getLongValue("content");
        } else {
            return 0;
        }
    }

    /**
     * Builds the cron expression for the task's schedule.
     * Fires at minute 0, second 0 of the configured hour, daily / weekly / monthly.
     */
    private String getArchiveJobCron(ArchiveTaskBo bo) {
        // second minute
        StringBuilder sb = new StringBuilder("0 0 ");
        // hour
        sb.append(bo.getExecuteHour());
        if (ArchiveJobCronEnum.DAY.getValue() == bo.getCycle()) {
            // every day: day-of-month month day-of-week year
            sb.append(" * * ? *");
        } else if (ArchiveJobCronEnum.WEEK.getValue() == bo.getCycle()) {
            // every week: executeDay is the day-of-week
            sb.append(" ? * ").append(bo.getExecuteDay()).append(" *");
        } else if (ArchiveJobCronEnum.MONTH.getValue() == bo.getCycle()) {
            // every month: executeDay is the day-of-month
            sb.append(" ? ").append(bo.getExecuteDay()).append(" * *");
        }

        return sb.toString();
    }

    /**
     * Updates an archive task, pushes the new definition to xxl-job, and
     * starts or stops the remote job according to the publish status.
     *
     * @param bo new task data (must carry an existing id)
     * @return {@code true} if the row was updated
     * @throws ServiceException when the task does not exist
     */
    @Override
    public Boolean updateByBo(ArchiveTaskBo bo) {
        ArchiveTask old = taskMapper.selectById(bo.getId());
        if (old == null) {
            throw new ServiceException("任务不存在");
        }
        ArchiveTask update = BeanUtil.toBean(bo, ArchiveTask.class);
        validEntityBeforeSave(update);
        // The xxl-job id is never changed by an update; carry it over from the stored row.
        update.setXxlJobId(old.getXxlJobId());

        operateXxlJob(update.getXxlJobId(), bo, "update");

        // NOTE(review): if getPubStatus() can return null this unboxes and throws NPE — verify upstream validation.
        if (PubStatusEnum.PUBLISHED.getValue() == bo.getPubStatus()) {
            operateXxlJob(update.getXxlJobId(), null, "start");
        } else {
            operateXxlJob(update.getXxlJobId(), null, "stop");
        }

        return taskMapper.updateById(update) > 0;
    }

    /**
     * Validation hook before insert/update.
     */
    private void validEntityBeforeSave(ArchiveTask entity) {
        //TODO add data validation, e.g. uniqueness constraints
    }

    /**
     * Deletes archive tasks in batch, removing each one's xxl-job entry first.
     *
     * @param ids     task ids to delete
     * @param isValid whether business validation should run before deleting
     * @return {@code true} if at least one row was deleted
     */
    @Override
    public Boolean deleteWithValidByIds(Collection<Long> ids, Boolean isValid) {
        if (isValid) {
            //TODO add business validation if required
        }
        for (Long id : ids) {
            ArchiveTask task = taskMapper.selectById(id);
            if (task == null) {
                continue;
            }
            // Remove the remote scheduled job before the local row disappears.
            operateXxlJob(task.getXxlJobId(), null, "remove");
        }
        return taskMapper.deleteBatchIds(ids) > 0;
    }

    /**
     * Entry point invoked by the xxl-job handler: runs one archive pass for a task.
     * A Redis lock keyed by task id + date (held up to 1h) prevents concurrent
     * runs of the same task on the same day.
     *
     * @param id task id (no-op when null or unknown)
     */
    @Override
    public void execute(Long id) {
        if (id == null) {
            return;
        }
        ArchiveTask task = taskMapper.selectById(id);
        if (task == null) {
            return;
        }

        // Run number = current date (yyyyMMdd); also part of the lock key.
        String no = DatePattern.PURE_DATE_FORMAT.format(new Date());
        String key = String.format("ArchiveTask:%d:%s", id, no);
        boolean lock = RedisUtils.setObjectIfAbsent(key, id, Duration.ofHours(1));
        if (!lock) {
            // Another node is already running this task today.
            return;
        }
        try {
            ArchiveResult result = taskBegin(task, no);
            try {
                ArchiveStrategy strategy = strategyMapper.selectById(task.getStrategyId());
                if (strategy == null) {
                    taskInfo(result, "归档策略不存在", ArchiveJobExecuteStatusEnum.FAILED);
                    return;
                }

                ArchiveLocation location = locationMapper.selectById(strategy.getLocationId());
                if (location == null) {
                    taskInfo(result, "归档位置不存在", ArchiveJobExecuteStatusEnum.FAILED);
                    return;
                }

                doExecute(task, strategy, location, result);
            } catch (Exception e) {
                // Record the failure on the result row; the outer catch only logs.
                log.error("归档异常{}", e.getMessage(), e);
                taskInfo(result, e.getMessage(), ArchiveJobExecuteStatusEnum.FAILED);
            }
        } catch (Exception e) {
            log.error("归档异常{}", e.getMessage(), e);
        } finally {
            RedisUtils.deleteObject(key);
        }
    }

    /**
     * Opens a new result row for this run, replacing any earlier result with the
     * same task id + run number (re-runs on the same day overwrite).
     */
    private ArchiveResult taskBegin(ArchiveTask task, String no) {
        resultMapper.deleteByTaskIdAndNo(task.getId(), no);

        ArchiveResult result = new ArchiveResult();
        result.setResultNo(no);
        result.setTaskId(task.getId());
        result.setTaskName(task.getName());
        result.setMessage(DateUtil.formatDateTime(new Date()) + ":任务开始");
        result.setExecuteStatus(ArchiveJobExecuteStatusEnum.EXECUTING.getValue());
        result.setStartTime(new Date());
        resultMapper.insert(result);
        return result;
    }

    /**
     * Appends a timestamped progress message to the result row and persists it,
     * also updating the execution status and end time.
     */
    private void taskInfo(ArchiveResult result, String message, ArchiveJobExecuteStatusEnum status) {
        result.setEndTime(new Date());
        result.setExecuteStatus(status.getValue());
        result.setMessage(result.getMessage() + "\n" + DateUtil.formatDateTime(new Date()) + ":" + message);
        log.info(message);
        resultMapper.updateById(result);
    }

    /**
     * Runs the archive: determines the PK range to archive, dispatches to the
     * strategy-specific writer, and deletes the source data on success.
     *
     * @param task     archive task
     * @param strategy archive strategy (source table, fields, target)
     * @param location archive destination (db / local dir / S3)
     * @param result   result row to report progress into
     */
    private void doExecute(ArchiveTask task, ArchiveStrategy strategy, ArchiveLocation location, ArchiveResult result) throws Exception {
        taskInfo(result, "开始归档", ArchiveJobExecuteStatusEnum.SUCCESS);

        // PK range and row count of the data selected by the strategy's script.
        Map<String, Object> range = taskMapper.getRangeDate(strategy.getSourceTable(), strategy.getSourcePk(), strategy.getScript());
        if (MapUtil.isEmpty(range)) {
            taskInfo(result, "没有要归档的数据", ArchiveJobExecuteStatusEnum.SUCCESS);
            return;
        }

        Object minPk = range.get(Constants.MIN_PK);
        Object maxPk = range.get(Constants.MAX_PK);
        Long totalCount = Long.valueOf(range.get(Constants.TOTAL_COUNT).toString());

        if (totalCount == 0) {
            taskInfo(result, "没有要归档的数据", ArchiveJobExecuteStatusEnum.SUCCESS);
            return;
        }
        result.setMinPk(minPk.toString());
        result.setMaxPk(maxPk.toString());
        result.setTotalCount(totalCount);
        taskInfo(result, String.format("归档范围%s ~ %s，共%d条数据", minPk, maxPk, totalCount), ArchiveJobExecuteStatusEnum.EXECUTING);
        boolean success;
        if (ArchiveStrategyTypeEnum.COMPRESS_TABLE.getValue() == strategy.getStrategyType()) {
            // compressed table: copy rows into an archive database
            success = archive2Db(minPk, maxPk, totalCount, strategy, location, result);
        } else if (ArchiveStrategyTypeEnum.SQL_FILE.getValue() == strategy.getStrategyType()) {
            // dump rows to a SQL file (local dir or S3)
            success = archive2Sql(minPk, maxPk, totalCount, task, strategy, location, result);
        } else {
            throw new ServiceException("不支持的归档类型");
        }
        taskInfo(result, String.format("数据归档结束，结果:{%b}", success), ArchiveJobExecuteStatusEnum.EXECUTING);

        if (success) {
            // Only delete the source rows once the archive copy is verified complete.
            deleteArchiveData(minPk, maxPk, task, strategy, result);
        }

        taskInfo(result, "归档结束", ArchiveJobExecuteStatusEnum.SUCCESS);
    }

    /**
     * Deletes the archived PK range from the source table, but only when the
     * task's delete strategy is "immediately".
     *
     * @param minPk    inclusive lower PK bound
     * @param maxPk    inclusive upper PK bound
     * @param task     archive task (carries the delete strategy)
     * @param strategy archive strategy (source table / PK column)
     * @param result   result row to report progress into
     */
    private void deleteArchiveData(Object minPk, Object maxPk, ArchiveTask task, ArchiveStrategy strategy, ArchiveResult result) {
        if (ArchiveDeleteStrategyTypeEnum.IMMEDIATELY.getValue() == task.getDeleteStrategy()) {
            String sourceTable = strategy.getSourceTable();
            String sourcePk = strategy.getSourcePk();
            taskMapper.deleteArchiveDate(sourceTable, sourcePk, minPk, maxPk);

            taskInfo(result, "删除归档数据", ArchiveJobExecuteStatusEnum.EXECUTING);
        }
    }

    /**
     * Dumps the PK range to an INSERT-statement SQL file, batch by batch.
     * For S3 locations the file is uploaded afterwards; for non-local locations
     * the temporary file is removed at the end.
     *
     * @return {@code true} when the written line count matches the expected row count
     *         (and, for S3, the upload succeeded)
     */
    private boolean archive2Sql(Object minPk, Object maxPk, Long totalCount, ArchiveTask task, ArchiveStrategy strategy,
                                ArchiveLocation location, ArchiveResult result) throws IOException {
        String dir = Constants.TEMP_DIR;
        if (ArchiveLocationTypeEnum.LOCAL.getValue() == location.getLocationType()) {
            dir = location.getLocalDir();
        }
        File sqlFile = new File(String.format("%s/archive/%d/%s.sql", dir, task.getId(), result.getResultNo()));
        if (FileUtils.exist(sqlFile)) {
            FileUtils.del(sqlFile);
        }
        taskInfo(result, "创建SQL文件:" + sqlFile.getAbsolutePath(), ArchiveJobExecuteStatusEnum.EXECUTING);
        sqlFile.getParentFile().mkdirs();
        sqlFile.createNewFile();
        String fields = "*";
        long count;
        // NOTE(review): FileWriter uses the platform default charset — confirm the
        // restore side reads the file with the same encoding.
        try (FileWriter fileWriter = new FileWriter(sqlFile)) {
            List<Map<String, Object>> temp = taskMapper.queryDataById(strategy.getSourceTable(), strategy.getSourcePk(), fields, minPk);
            if (CollectionUtils.isEmpty(temp)) {
                taskInfo(result, "没有要归档的数据", ArchiveJobExecuteStatusEnum.SUCCESS);
                return false;
            }
            // First row establishes the column order used for every batch.
            List<String> cells = writeSqlHead(strategy, fileWriter, temp.get(0));
            writeDataLine(fileWriter, cells, temp.get(0));
            fileWriter.flush();
            count = 1;
            // Page through the remaining rows by PK; each batch advances minPk to its last PK.
            while (ArchiveUtil.pkCompareTo(minPk, maxPk) < 0 && CollectionUtils.isNotEmpty(temp = taskMapper.queryDataByIds(strategy.getSourceTable(), strategy.getSourcePk(), fields, minPk, maxPk, Constants.BATCH_SIZE))) {
                minPk = temp.get(temp.size() - 1).get(strategy.getSourcePk());

                writeDataSqlLines(fileWriter, temp, cells);

                fileWriter.flush();
                count += temp.size();
                taskInfo(result, String.format("归档数据:%d/%d", count, totalCount), ArchiveJobExecuteStatusEnum.SUCCESS);
            }
        }
        // Files.lines must be closed or the underlying file handle leaks
        // (the original left the stream open).
        long lines;
        try (Stream<String> lineStream = Files.lines(Paths.get(sqlFile.getAbsolutePath()))) {
            lines = lineStream.count();
        }
        // NOTE(review): "lines - 2" assumes the non-data portion of the file is
        // exactly two lines — confirm against ArchiveUtil's head/line format.
        boolean success = count == totalCount && totalCount == lines - 2;
        if (ArchiveLocationTypeEnum.S3.getValue() == location.getLocationType()) {
            try {
                archiveS3(result, sqlFile, location);
                taskInfo(result, "上传SQL文件到对象存储成功", ArchiveJobExecuteStatusEnum.EXECUTING);
            } catch (Exception e) {
                success = false;
                taskInfo(result, "上传SQL文件到对象存储失败:" + e.getMessage(), ArchiveJobExecuteStatusEnum.EXECUTING);
            }
        }
        if (ArchiveLocationTypeEnum.LOCAL.getValue() != location.getLocationType()) {
            taskInfo(result, "删除临时SQL文件:" + sqlFile.getAbsolutePath(), ArchiveJobExecuteStatusEnum.EXECUTING);
            if (!sqlFile.delete()) {
                log.warn("删除临时SQL文件失败:{}", sqlFile.getAbsolutePath());
            }
        }
        return success;
    }

    /**
     * Uploads the SQL dump to the S3-compatible store configured on the location.
     */
    private void archiveS3(ArchiveResult result, File sqlFile, ArchiveLocation location) {
        OssClient client = ArchiveUtil.getS3Client(location);

        client.upload(sqlFile, String.format("/archive/%d/%s.sql", result.getTaskId(), result.getResultNo()));
    }

    /**
     * Appends one batch of VALUES lines, continuing the previous statement with ",\n".
     */
    private void writeDataSqlLines(FileWriter fileWriter, List<Map<String, Object>> temp, List<String> cells) throws IOException {
        fileWriter.write(",\n");
        fileWriter.write(ArchiveUtil.getDataSqlLines(temp, cells));
    }

    /**
     * Writes the INSERT head and returns the column list (taken from the first
     * row's key set) used for all subsequent data lines.
     */
    private List<String> writeSqlHead(ArchiveStrategy strategy, FileWriter fileWriter, Map<String, Object> data) throws IOException {
        List<String> cells = new ArrayList<>(data.keySet());
        fileWriter.write(ArchiveUtil.getInsertHeadSql(strategy.getSourceTable(), cells));
        return cells;
    }

    /**
     * Writes a single data row as a VALUES line.
     */
    private void writeDataLine(FileWriter fileWriter, List<String> cells, Map<String, Object> data) throws IOException {
        fileWriter.write(ArchiveUtil.getDataSqlLine(cells, data));
    }

    /**
     * Copies the PK range into the archive database, batch by batch, then
     * verifies the copy by counting rows in the target table.
     *
     * @return {@code true} when the target table holds exactly {@code totalCount}
     *         rows in the archived PK range
     */
    private boolean archive2Db(Object minPk, Object maxPk, Long totalCount, ArchiveStrategy strategy, ArchiveLocation location, ArchiveResult result) throws SQLException {
        // Keep the original lower bound; minPk itself advances while paging.
        Object tmpMinPk = minPk;
        List<String> sourceCells = Arrays.asList(strategy.getSourceFields().split(Constants.COMMA));
        List<Map<String, Object>> temp = taskMapper.queryDataById(strategy.getSourceTable(), strategy.getSourcePk(), strategy.getSourceFields(), minPk);
        if (CollectionUtils.isEmpty(temp)) {
            taskInfo(result, "没有要归档的数据", ArchiveJobExecuteStatusEnum.SUCCESS);
            return false;
        }
        List<String> targetCells = Arrays.asList(strategy.getTargetFields().split(Constants.COMMA));
        String headSql = ArchiveUtil.getInsertHeadSql(strategy.getTargetTable(), targetCells);
        String sqlLine = ArchiveUtil.getDataSqlLine(sourceCells, temp.get(0));

        long count;
        try (Connection connection = ArchiveUtil.getDbConnection(location)) {
            // Insert the first row, then page through the rest by PK.
            count = 1;
            write2Db(connection, headSql + sqlLine);
            taskInfo(result, String.format("归档数据:%d/%d", count, totalCount), ArchiveJobExecuteStatusEnum.SUCCESS);
            while (ArchiveUtil.pkCompareTo(minPk, maxPk) < 0 && CollectionUtils.isNotEmpty(temp = taskMapper.queryDataByIds(strategy.getSourceTable(), strategy.getSourcePk(), strategy.getSourceFields(), minPk, maxPk, Constants.BATCH_SIZE))) {
                minPk = temp.get(temp.size() - 1).get(strategy.getSourcePk());

                String sqlLines = ArchiveUtil.getDataSqlLines(temp, sourceCells);
                write2Db(connection, headSql + sqlLines);

                count += temp.size();
                taskInfo(result, String.format("归档数据:%d/%d", count, totalCount), ArchiveJobExecuteStatusEnum.SUCCESS);
            }

            count = getArchivedCount(connection, strategy.getTargetTable(), strategy.getTargetPk(), tmpMinPk, maxPk);
        }
        return count == totalCount;
    }

    /**
     * Counts the rows in {@code table} whose PK lies in [minPk, maxPk].
     * Table and column names come from the trusted strategy configuration and
     * cannot be bound as parameters; the PK bounds ARE bound as parameters,
     * which also removes the previous {@code %d} formatting failure on
     * numeric-string PKs.
     */
    private long getArchivedCount(Connection connection, String table, String pkCell, Object minPk, Object maxPk) throws SQLException {
        String sql = String.format("SELECT COUNT(*) FROM %s t WHERE t.%s >= ? AND t.%s <= ?", table, pkCell, pkCell);
        log.info(sql);
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.setObject(1, minPk);
            ps.setObject(2, maxPk);
            // FIX: the original called ps.executeQuery(sql); JDBC forbids the
            // String-argument executeQuery on a PreparedStatement and throws
            // SQLException at runtime. The no-arg form runs the prepared SQL.
            try (ResultSet resultSet = ps.executeQuery()) {
                resultSet.next();
                return resultSet.getLong(1);
            }
        }
    }

    /**
     * Executes one pre-built INSERT statement on the archive connection.
     */
    private void write2Db(Connection connection, String sql) throws SQLException {
        try (PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.execute();
        }
    }
}
