package com.settlement.system.service.impl;

import com.alibaba.fastjson2.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.settlement.system.common.base.IBaseEnum;
import com.settlement.system.common.constant.SystemConstants;
import com.settlement.system.common.enums.DeleteEnum;
import com.settlement.system.common.enums.SysImportErrorDetailStatusEnum;
import com.settlement.system.common.exception.BusinessException;
import com.settlement.system.common.util.StringUtils;
import com.settlement.system.mapper.SysImportErrorDetailMapper;
import com.settlement.system.mapper.SysImportWorkingQueueMapper;
import com.settlement.system.model.entity.KafkaSendErrorRecord;
import com.settlement.system.model.entity.SysImportErrorDetail;
import com.settlement.system.model.entity.SysImportWorkingQueue;
import com.settlement.system.model.query.ImportErrorDetailPageQuery;
import com.settlement.system.model.query.SysImportWorkingQueueQuery;
import com.settlement.system.service.KafkaSendErrorRecordService;
import com.settlement.system.service.SnowflakeIdWorker;
import com.settlement.system.service.SysImportErrorDetailService;
import com.settlement.system.service.SysImportWorkingQueueService;
import lombok.RequiredArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;

/**
 * <p>
 * Service implementation for queue-consumption error details: paging/querying
 * failed records and re-pushing their payloads back onto Kafka topics.
 * </p>
 *
 * @author huangrensen
 * @since 2023-10-12
 */
@Service
@RequiredArgsConstructor
public class SysImportErrorDetailServiceImpl extends ServiceImpl<SysImportErrorDetailMapper, SysImportErrorDetail> implements SysImportErrorDetailService {

    private static final Logger logger = LoggerFactory.getLogger(SysImportErrorDetailServiceImpl.class);

    private final KafkaTemplate<String, String> kafkaTemplate;

    private final RedisTemplate redisTemplate;

    private final SysImportWorkingQueueService importWorkingQueueService;

    private final SysImportWorkingQueueMapper importWorkingQueueMapper;

    private final KafkaSendErrorRecordService kafkaSendErrorRecordService;

    // ID generator used to tag each push attempt so failures can be correlated.
    private final SnowflakeIdWorker snowflakeIdWorker = SnowflakeIdWorker.getInstance();

    /**
     * Logically deletes (flags) all non-deleted error records matching the given
     * platform/type key and order number.
     *
     * @param platformAndType platform-and-type key of the failed record
     * @param orderNo         business order number of the failed record
     * @return always {@code true} (no-op when either argument is blank)
     */
    @Override
    public boolean deleteRecord(String platformAndType, String orderNo) {
        // FIX: the original used &&, which only short-circuited when BOTH fields
        // were blank — contradicting the intent below (both fields are required
        // for a safe lookup). With ||, a single blank field skips the update.
        if (!StringUtils.hasText(platformAndType) || !StringUtils.hasText(orderNo)) {
            // 需要两个字段都不能为空才查询，避免误更新
            return true;
        }
        List<SysImportErrorDetail> list = this.list(new LambdaQueryWrapper<SysImportErrorDetail>()
                .eq(SysImportErrorDetail::getPlatformAndType, platformAndType)
                .eq(SysImportErrorDetail::getOrderNo, orderNo)
                .eq(SysImportErrorDetail::getDeleted, DeleteEnum.NO.getValue()));
        if (!list.isEmpty()) {
            // 如果存在记录，就标志删除
            for (SysImportErrorDetail sysImportErrorDetail : list) {
                logger.info("存在队列消费失败记录，将标记为删除：【{}】【{}】", platformAndType, orderNo);
                sysImportErrorDetail.setDeleted(DeleteEnum.YES.getValue());
            }
            this.updateBatchById(list);
        }
        return true;
    }

    /**
     * Pages over consumption-failure records (status = ERROR, not deleted),
     * optionally filtered by platform/type, order number, queue name and a
     * create-time date range. Each record gets its human-readable status label.
     *
     * @param queryParams page number/size plus optional filters
     * @return the requested page, with {@code statusName} populated per record
     */
    @Override
    public Page<SysImportErrorDetail> getImportErrorDetailPage(ImportErrorDetailPageQuery queryParams) {
        // 查询参数
        int pageNum = queryParams.getPageNum();
        int pageSize = queryParams.getPageSize();
        // 查询数据
        Page<SysImportErrorDetail> page = this.page(
                new Page<>(pageNum, pageSize),
                new LambdaQueryWrapper<SysImportErrorDetail>()
                        .eq(StringUtils.hasText(queryParams.getPlatformAndType()), SysImportErrorDetail::getPlatformAndType, queryParams.getPlatformAndType())
                        .eq(StringUtils.hasText(queryParams.getOrderNo()), SysImportErrorDetail::getOrderNo, queryParams.getOrderNo())
                        .eq(StringUtils.hasText(queryParams.getQueueName()), SysImportErrorDetail::getQueueName, queryParams.getQueueName())
                        // date range is inclusive of startDate and exclusive of endDate + 1 day
                        .ge(Objects.nonNull(queryParams.getStartDate()), SysImportErrorDetail::getCreateTime, Objects.nonNull(queryParams.getStartDate()) ? queryParams.getStartDate().atStartOfDay() : null)
                        .lt(Objects.nonNull(queryParams.getEndDate()), SysImportErrorDetail::getCreateTime, Objects.nonNull(queryParams.getEndDate()) ? queryParams.getEndDate().plusDays(1).atStartOfDay() : null)
                        // 查询消费失败的数据记录
                        .eq(SysImportErrorDetail::getStatus, SysImportErrorDetailStatusEnum.ERROR.getValue())
                        // 查询未删除的记录
                        .eq(SysImportErrorDetail::getDeleted, DeleteEnum.NO.getValue())
                        .orderByDesc(SysImportErrorDetail::getUpdateTime).orderByDesc(SysImportErrorDetail::getId));
        for (SysImportErrorDetail record : page.getRecords()) {
            record.setStatusName(IBaseEnum.getLabelByValue(record.getStatus(), SysImportErrorDetailStatusEnum.class));
        }
        return page;
    }

    /**
     * Creates an error-detail record keyed by (platformAndType, orderNo), or —
     * if one already exists — resets it to ERROR status with the new remark.
     *
     * @param detail incoming failure data (platformAndType/orderNo identify it)
     */
    @Override
    public void checkAndCreate(SysImportErrorDetail detail) {
        SysImportErrorDetail errorDetail = this.getOne(new LambdaQueryWrapper<SysImportErrorDetail>().eq(SysImportErrorDetail::getPlatformAndType, detail.getPlatformAndType()).eq(SysImportErrorDetail::getOrderNo, detail.getOrderNo()));
        if (Objects.isNull(errorDetail)) {
            errorDetail = new SysImportErrorDetail();
            BeanUtils.copyProperties(detail, errorDetail);
            // cutString() truncates over-long fields before persisting
            errorDetail.cutString();
            this.save(errorDetail);
        } else {
            errorDetail.setStatus(SysImportErrorDetailStatusEnum.ERROR.getValue());
            errorDetail.setRemark(detail.getRemark());
            errorDetail.setUpdateTime(LocalDateTime.now());
            errorDetail.cutString();
            this.updateById(errorDetail);
        }
    }

    /**
     * Re-pushes the payloads of the given error records back to their Kafka
     * topics. Send failures are persisted as {@link KafkaSendErrorRecord}s.
     *
     * @param idStr comma-separated list of error-detail ids
     */
    @Override
    public void rePush(String idStr) {
        Assert.isTrue(StringUtils.hasText(idStr), "推送的数据为空");
        // Trim and drop empty segments so inputs like "1, 2," don't blow up
        // in Long.parseLong with a NumberFormatException.
        List<Long> idArray = Arrays.stream(idStr.split(","))
                .map(String::trim)
                .filter(StringUtils::hasText)
                .map(Long::parseLong)
                .toList();
        List<SysImportErrorDetail> sysImportErrorDetails = this.getBaseMapper().selectBatchIds(idArray);
        for (SysImportErrorDetail sysImportErrorDetail : sysImportErrorDetails) {
            sysImportErrorDetail.setUuid(snowflakeIdWorker.nextId());
            sendToQueue(sysImportErrorDetail);
        }
    }

    /**
     * Updates one error record's payload, then pushes the new payload back to
     * its Kafka topic. Send failures are persisted for later retry.
     *
     * @param detail carries the target id and the (possibly edited) payload
     */
    @Override
    public void pushOne(SysImportErrorDetail detail) {
        SysImportErrorDetail importErrorDetail = this.getBaseMapper().selectById(detail.getId());
        // Guard against an NPE when the record was deleted concurrently.
        Assert.notNull(importErrorDetail, "推送的数据不存在");
        importErrorDetail.setDataText(detail.getDataText());
        this.updateById(importErrorDetail);
        importErrorDetail.setUuid(snowflakeIdWorker.nextId());
        sendToQueue(importErrorDetail);
    }

    /**
     * Re-pushes ALL consumption-failure records. The id set is first snapshotted
     * into a freshly (re)created working table, then drained page by page so the
     * source-table pagination isn't disturbed while records are re-sent.
     * Guarded by a Redis lock so only one run is active at a time.
     *
     * @throws BusinessException if any step of the job fails
     */
    @Override
    public void rePushAll() {
        // FIX: acquire the lock atomically. The original get-then-set pair
        // allowed two concurrent callers to both pass the null check.
        // NOTE(review): the lock has no TTL, so a crash between here and the
        // finally block leaves it stuck until manually cleared — consider a TTL.
        Boolean locked = redisTemplate.opsForValue()
                .setIfAbsent(SystemConstants.IMPORT_WORKING_QUEUE_LOCK, SystemConstants.IMPORT_WORKING_QUEUE_LOCK);
        Assert.isTrue(Boolean.TRUE.equals(locked), "有任务正在处理，请稍等几分钟再试...");
        // 开始工作
        boolean runFlag;
        try {
            // Recreate the working table from scratch for this run.
            if (StringUtils.hasText(importWorkingQueueMapper.existTable())) {
                importWorkingQueueMapper.dropTable();
            }
            // 创建表
            importWorkingQueueMapper.createTable();
            ImportErrorDetailPageQuery queryParams = new ImportErrorDetailPageQuery();
            queryParams.setPageNum(1);
            queryParams.setPageSize(300);
            logger.info("正在查询转换失败的记录...");
            Page<SysImportErrorDetail> detailPage = this.getImportErrorDetailPage(queryParams);
            List<SysImportWorkingQueue> importWorkingQueues;
            while (!detailPage.getRecords().isEmpty()) {
                importWorkingQueues = new ArrayList<>();
                // 正在把数据转移到另外一张表中
                for (SysImportErrorDetail record : detailPage.getRecords()) {
                    SysImportWorkingQueue workingQueue = new SysImportWorkingQueue();
                    workingQueue.setDataId(record.getId().toString());
                    importWorkingQueues.add(workingQueue);
                }
                // 保存数据
                importWorkingQueueService.saveBatch(importWorkingQueues);
                // 查询下一页
                queryParams.setPageNum(queryParams.getPageNum() + 1);
                logger.info("正在查询{}页数据，共{}页...", queryParams.getPageNum(), detailPage.getPages());
                detailPage = this.getImportErrorDetailPage(queryParams);
            }
            logger.info("查询转换失败的记录完成...");
            // 然后直接查询工作表的数据即可
            SysImportWorkingQueueQuery importWorkingQueueQuery = new SysImportWorkingQueueQuery();
            importWorkingQueueQuery.setPageNum(1);
            importWorkingQueueQuery.setPageSize(300);
            Page<SysImportWorkingQueue> workingQueuePage = importWorkingQueueService.getPage(importWorkingQueueQuery);
            List<String> dataIdList;
            while (!workingQueuePage.getRecords().isEmpty()) {
                dataIdList = workingQueuePage.getRecords().stream().map(SysImportWorkingQueue::getDataId).toList();
                // 再按照ID列表去查
                List<SysImportErrorDetail> detailList = this.getBaseMapper().selectBatchIds(dataIdList);
                for (SysImportErrorDetail detail : detailList) {
                    detail.setUuid(snowflakeIdWorker.nextId());
                    sendToQueue(detail);
                }
                // 查询下一页
                importWorkingQueueQuery.setPageNum(importWorkingQueueQuery.getPageNum() + 1);
                logger.info("正在查询{}页数据，共{}页...", importWorkingQueueQuery.getPageNum(), workingQueuePage.getPages());
                workingQueuePage = importWorkingQueueService.getPage(importWorkingQueueQuery);
            }
            runFlag = true;
        } catch (Exception e) {
            // FIX: printStackTrace() replaced with the class logger.
            logger.error("重推全部队列消费失败记录时出现异常", e);
            runFlag = false;
        } finally {
            // 删除锁住标志
            redisTemplate.delete(SystemConstants.IMPORT_WORKING_QUEUE_LOCK);
        }
        // 检查是否出现错误
        if (!runFlag) {
            throw new BusinessException("操作失败,请联系运维人员");
        }
    }

    /**
     * Sends one record's payload to its Kafka topic asynchronously; when the
     * send fails, the failure is persisted as a {@link KafkaSendErrorRecord}
     * tagged with the record's uuid so it can be retried later.
     * Extracted from three identical inline lambdas (rePush/pushOne/rePushAll).
     *
     * @param detail record supplying queue name, payload and uuid
     */
    private void sendToQueue(SysImportErrorDetail detail) {
        CompletableFuture<SendResult<String, String>> completableFuture =
                kafkaTemplate.send(detail.getQueueName(), detail.getDataText());
        completableFuture.whenComplete((result, throwable) -> {
            if (Objects.nonNull(throwable)) {
                // FIX: printStackTrace() replaced with the class logger.
                logger.error("队列消息推送失败：【{}】【{}】", detail.getQueueName(), detail.getUuid(), throwable);
                KafkaSendErrorRecord errorRecord = new KafkaSendErrorRecord();
                errorRecord.setDataText(JSONObject.toJSONString(detail.getDataText()));
                errorRecord.setRemark(throwable.getMessage());
                errorRecord.setQueueName(detail.getQueueName());
                errorRecord.setUuid(detail.getUuid());
                kafkaSendErrorRecordService.checkAndSave(errorRecord);
            }
        });
    }
}
