package com.itheima.task.service.impl;

import com.alibaba.fastjson.JSON;
import com.itheima.common.redis.CacheService;
import com.itheima.model.audit.enums.AuditStatus;
import com.itheima.model.audit.pojos.AuditTask;
import com.itheima.model.common.dtos.ResponseResult;
import com.itheima.model.common.enums.AppHttpCodeEnum;
import com.itheima.task.config.KafkaConfig;
import com.itheima.task.service.AuditService;
import com.itheima.task.service.DelayQueueService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;

import java.util.*;

/**
 * 延迟队列服务实现
 */
@Service
@Slf4j
@Service
@Slf4j
public class DelayQueueServiceImpl implements DelayQueueService {

    @Autowired
    private CacheService cacheService;

    @Autowired
    private MongoTemplate mongoTemplate;

    @Autowired
    private AuditService auditService;

    @Autowired
    @Qualifier("auditTaskExecutor")
    private ThreadPoolTaskExecutor taskExecutor;

    /** Max number of task ids drained from the ready queue per poll. */
    @Value("${delay-queue.batch-size:10}")
    private int batchSize;

    /** Scan period in ms; also used as the look-ahead window when pulling due tasks. */
    @Value("${delay-queue.scan-interval:5000}")
    private long scanInterval;

    /**
     * Registers a delayed audit task: persists it to MongoDB, schedules it in the
     * Redis ZSet delay queue (scored by absolute execution time), and seeds the
     * status cache with PENDING.
     *
     * @param task         the audit task to schedule; its timestamps, status and
     *                     executeTime are populated here
     * @param delaySeconds delay before execution, in seconds
     * @return ok result wrapping the task, or SERVER_ERROR on failure
     */
    @Override
    public ResponseResult addDelayTask(AuditTask task, long delaySeconds) {
        try {
            // Compute the absolute execution time BEFORE persisting, so the
            // MongoDB document also carries it. (Previously executeTime was set
            // after save() and the persisted copy was missing it.)
            long executeTime = System.currentTimeMillis() + delaySeconds * 1000L;
            Date now = new Date();
            task.setCreateTime(now);
            task.setUpdateTime(now);
            task.setAuditStatus(AuditStatus.PENDING.getCode());
            task.setExecuteTime(executeTime);

            // 1. Persist the full task document.
            mongoTemplate.save(task);

            // 2. Schedule in the delay queue (ZSet member = taskId, score = executeTime).
            cacheService.zAdd(KafkaConfig.Keys.DELAY_QUEUE_AUDIT_TASKS,
                    task.getTaskId(), executeTime);

            // 3. Seed the status cache for fast lookups.
            cacheService.hPut(KafkaConfig.Keys.TASK_STATUS_CACHE,
                    task.getTaskId(),
                    String.valueOf(AuditStatus.PENDING.getCode()));

            log.info("添加延迟任务成功: taskId={}, delaySeconds={}, executeTime={}",
                    task.getTaskId(), delaySeconds, new Date(executeTime));

            return ResponseResult.okResult(task);

        } catch (Exception e) {
            log.error("添加延迟任务失败: taskId={}", task.getTaskId(), e);
            return ResponseResult.errorResult(AppHttpCodeEnum.SERVER_ERROR, "添加延迟任务失败");
        }
    }

    /**
     * Cancels a task everywhere it may live: delay queue, ready queue, in-flight
     * set, MongoDB document, and the status cache.
     *
     * NOTE(review): a task already picked up by {@link #executeTask(String)} may
     * still finish auditing; cancellation here is best-effort, as in the original.
     *
     * @param taskId id of the task to cancel
     * @return ok result on success, SERVER_ERROR on failure
     */
    @Override
    public ResponseResult cancelTask(String taskId) {
        try {
            // 1. Remove from the delay queue (if not yet due).
            cacheService.zRemove(KafkaConfig.Keys.DELAY_QUEUE_AUDIT_TASKS, taskId);

            // 2. Remove every matching element from the ready queue (count 0 = all).
            cacheService.lRemove(KafkaConfig.Keys.READY_QUEUE_AUDIT_TASKS, 0, taskId);

            // 3. Remove from the in-flight set.
            cacheService.sRemove(KafkaConfig.Keys.PROCESSING_AUDIT_TASKS, taskId);

            // 4. Mark the MongoDB document as cancelled, if it exists.
            Query query = new Query(Criteria.where("taskId").is(taskId));
            AuditTask task = mongoTemplate.findOne(query, AuditTask.class);
            if (task != null) {
                task.setAuditStatus(AuditStatus.CANCELLED.getCode());
                task.setUpdateTime(new Date());
                mongoTemplate.save(task);
            }

            // 5. Keep the status cache in sync.
            cacheService.hPut(KafkaConfig.Keys.TASK_STATUS_CACHE,
                    taskId,
                    String.valueOf(AuditStatus.CANCELLED.getCode()));

            log.info("取消任务成功: taskId={}", taskId);
            return ResponseResult.okResult("取消成功");

        } catch (Exception e) {
            log.error("取消任务失败: taskId={}", taskId, e);
            return ResponseResult.errorResult(AppHttpCodeEnum.SERVER_ERROR, "取消任务失败");
        }
    }

    /**
     * Looks up a task's status, preferring the Redis hash cache and falling back
     * to MongoDB (repopulating the cache on a miss).
     *
     * @param taskId id of the task to query
     * @return ok result with a map of taskId/status/statusName (plus timestamps
     *         when served from MongoDB), DATA_NOT_EXIST when unknown,
     *         SERVER_ERROR on failure
     */
    @Override
    public ResponseResult getTaskStatus(String taskId) {
        try {
            // 1. Cache first. Note: the cache-hit payload intentionally has no
            // timestamps — only the status code is cached.
            String statusStr = (String) cacheService.hGet(KafkaConfig.Keys.TASK_STATUS_CACHE, taskId);
            if (statusStr != null) {
                int status = Integer.parseInt(statusStr);
                Map<String, Object> result = new HashMap<>();
                result.put("taskId", taskId);
                result.put("status", status);
                result.put("statusName", statusDesc(status));
                return ResponseResult.okResult(result);
            }

            // 2. Cache miss — authoritative lookup in MongoDB.
            Query query = new Query(Criteria.where("taskId").is(taskId));
            AuditTask task = mongoTemplate.findOne(query, AuditTask.class);
            if (task == null) {
                return ResponseResult.errorResult(AppHttpCodeEnum.DATA_NOT_EXIST, "任务不存在");
            }

            // 3. Repopulate the cache so the next read is served from Redis.
            cacheService.hPut(KafkaConfig.Keys.TASK_STATUS_CACHE,
                    taskId,
                    String.valueOf(task.getAuditStatus()));

            Map<String, Object> result = new HashMap<>();
            result.put("taskId", taskId);
            result.put("status", task.getAuditStatus());
            result.put("statusName", statusDesc(task.getAuditStatus()));
            result.put("createTime", task.getCreateTime());
            result.put("updateTime", task.getUpdateTime());

            return ResponseResult.okResult(result);

        } catch (Exception e) {
            log.error("查询任务状态失败: taskId={}", taskId, e);
            return ResponseResult.errorResult(AppHttpCodeEnum.SERVER_ERROR, "查询失败");
        }
    }

    /**
     * Null-safe status description: the original called
     * {@code AuditStatus.getByCode(status).getDesc()} directly and would NPE on
     * an unknown/corrupt status code read from the cache.
     */
    private String statusDesc(int statusCode) {
        AuditStatus status = AuditStatus.getByCode(statusCode);
        return status != null ? status.getDesc() : "未知";
    }

    /**
     * Moves due tasks from the delay ZSet to the ready list. Pre-fetches by one
     * scan interval so worst-case lateness stays below one scan period.
     */
    @Override
    public void scanExpiredTasks() {
        try {
            // Everything scored within [0, now + scanInterval] is considered due.
            long deadline = System.currentTimeMillis() + scanInterval;

            Set<String> dueTaskIds = cacheService.zRangeByScore(
                    KafkaConfig.Keys.DELAY_QUEUE_AUDIT_TASKS,
                    0,
                    deadline);

            if (dueTaskIds == null || dueTaskIds.isEmpty()) {
                return;
            }

            log.info("扫描到期任务: count={}", dueTaskIds.size());

            for (String taskId : dueTaskIds) {
                try {
                    // Claim-then-transfer: only push to the ready queue when this
                    // zRemove actually deleted the member, so concurrent scanners
                    // (multi-instance deployment) cannot enqueue the same task
                    // twice. NOTE(review): assumes zRemove returns the removed
                    // count (RedisTemplate convention) — confirm CacheService.
                    Long removed = cacheService.zRemove(KafkaConfig.Keys.DELAY_QUEUE_AUDIT_TASKS, taskId);
                    if (removed != null && removed > 0) {
                        cacheService.lRightPush(KafkaConfig.Keys.READY_QUEUE_AUDIT_TASKS, taskId);
                    }
                } catch (Exception e) {
                    log.error("转移任务失败: taskId={}", taskId, e);
                }
            }

        } catch (Exception e) {
            log.error("扫描延迟队列失败", e);
        }
    }

    /**
     * Drains up to {@code batchSize} task ids from the ready queue and hands each
     * one to the audit thread pool.
     */
    @Override
    public void consumeReadyTasks() {
        try {
            // 1. Pop up to batchSize ids; stop early when the queue is empty.
            List<String> taskIds = new ArrayList<>(batchSize);
            for (int i = 0; i < batchSize; i++) {
                String taskId = (String) cacheService.lLeftPop(KafkaConfig.Keys.READY_QUEUE_AUDIT_TASKS);
                if (taskId == null) {
                    break;
                }
                taskIds.add(taskId);
            }

            if (taskIds.isEmpty()) {
                return;
            }

            log.info("消费就绪队列: count={}", taskIds.size());

            // 2. Submit to the executor. A popped id must not be lost: if the
            //    pool rejects the job (saturated queue), push the id back to the
            //    ready queue instead of dropping it. (Previously a rejection fell
            //    through to the outer catch and the whole batch was lost.)
            for (String taskId : taskIds) {
                try {
                    taskExecutor.execute(() -> executeTask(taskId));
                } catch (Exception rejected) {
                    log.error("提交任务到线程池失败, 重新入队: taskId={}", taskId, rejected);
                    cacheService.lRightPush(KafkaConfig.Keys.READY_QUEUE_AUDIT_TASKS, taskId);
                }
            }

        } catch (Exception e) {
            log.error("消费就绪队列失败", e);
        }
    }

    /**
     * Executes a single audit task on a worker thread: claims it via the
     * in-flight set (duplicate-suppression), loads the document from MongoDB,
     * runs the audit, then cleans up all queue/set residue.
     */
    private void executeTask(String taskId) {
        try {
            // 1. Atomic claim: sAdd returns 0 when the member already exists,
            //    i.e. another worker is processing this task.
            Long added = cacheService.sAdd(KafkaConfig.Keys.PROCESSING_AUDIT_TASKS, taskId);
            if (added == null || added == 0) {
                log.warn("任务已在处理中，跳过: taskId={}", taskId);
                return;
            }

            log.info("开始执行任务: taskId={}", taskId);

            // 2. Load the task document.
            Query query = new Query(Criteria.where("taskId").is(taskId));
            AuditTask task = mongoTemplate.findOne(query, AuditTask.class);

            if (task == null) {
                log.error("任务不存在: taskId={}", taskId);
                cleanupTask(taskId);
                return;
            }

            // 3. Run the actual audit.
            auditService.executeAudit(task);

            log.info("任务执行完成: taskId={}", taskId);

            // 4. Remove all traces of the task from Redis.
            cleanupTask(taskId);

        } catch (Exception e) {
            log.error("执行任务失败: taskId={}", taskId, e);
            // Clean up on failure too, so no stale in-flight entry blocks a retry.
            cleanupTask(taskId);
        }
    }

    /**
     * Best-effort removal of a task id from the delay queue, ready queue and
     * in-flight set. Failures are logged but never propagated.
     */
    private void cleanupTask(String taskId) {
        try {
            // 1. Delay queue (in case it was re-added or never transferred).
            cacheService.zRemove(KafkaConfig.Keys.DELAY_QUEUE_AUDIT_TASKS, taskId);

            // 2. Ready queue, all matching elements (count 0 = remove all).
            cacheService.lRemove(KafkaConfig.Keys.READY_QUEUE_AUDIT_TASKS, 0, taskId);

            // 3. In-flight set.
            cacheService.sRemove(KafkaConfig.Keys.PROCESSING_AUDIT_TASKS, taskId);

            log.debug("清理任务数据完成: taskId={}", taskId);
        } catch (Exception e) {
            log.error("清理任务数据失败: taskId={}", taskId, e);
        }
    }
}

