package com.heima.schedule.service.impl;

import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.heima.model.schedule.dtos.Task;
import com.heima.model.schedule.pojos.Taskinfo;
import com.heima.model.schedule.pojos.TaskinfoLogs;
import com.heima.model.topics.WmNewsConstants;
import com.heima.schedule.mapper.TaskinfoLogsMapper;
import com.heima.schedule.mapper.TaskinfoMapper;
import com.heima.schedule.service.TaskService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.util.Calendar;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

@Service
@Slf4j
@RequiredArgsConstructor
public class TaskServiceImpl implements TaskService {
    private final StringRedisTemplate redisTemplate;
    private final TaskinfoMapper taskinfoMapper;
    private final TaskinfoLogsMapper taskinfoLogsMapper;
    private final KafkaTemplate<String, String> kafkaTemplate;

    /** Redis zset key prefix; the full key is {@code TASK:<taskType>}. */
    private static final String TASK = "TASK:";
    private static final String RELOAD_LOCK = "RELOAD_DATA_LOCK";
    private static final String REFRESH_LOCK = "TASK_REFRESH_LOCK";

    /**
     * Add a delayed task (called explicitly by producers).
     * <p>
     * The task is always persisted to the DB (with a SCHEDULED log row). Then:
     * <ul>
     *   <li>already due &rarr; publish to Kafka immediately and mark it executed,
     *       so the periodic reload does not fire it a second time;</li>
     *   <li>due within the next 5 minutes &rarr; stage it in the Redis zset,
     *       scored by its real execute time;</li>
     *   <li>otherwise &rarr; leave it in the DB only; {@link #reloadData()} will
     *       stage it once it enters the 5-minute window.</li>
     * </ul>
     *
     * @param task task to schedule; its {@code taskId} is filled in from the DB insert
     * @return the generated task id
     */
    @Override
    public long addTask(Task task) {
        log.info("添加任务！");
        // 1. persist the task
        Taskinfo taskinfo = new Taskinfo();
        BeanUtils.copyProperties(task, taskinfo);
        taskinfoMapper.insert(taskinfo);
        // propagate the DB-generated id back to the caller's object
        task.setTaskId(taskinfo.getTaskId());
        // persist the task log, initially SCHEDULED
        TaskinfoLogs taskinfoLogs = new TaskinfoLogs();
        BeanUtils.copyProperties(taskinfo, taskinfoLogs);
        taskinfoLogs.setStatus(TaskinfoLogs.SCHEDULED);
        taskinfoLogsMapper.insert(taskinfoLogs);

        // 2. compute the staging horizon: 5 minutes from now, in epoch millis
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.MINUTE, 5);
        long presetTime = calendar.getTimeInMillis();
        long executeTime = task.getExecuteTime().getTime();

        if (executeTime <= System.currentTimeMillis()) {
            // Already due: publish right away, and mark it executed so neither
            // reloadData() nor refresh() can fire it again.
            kafkaTemplate.send(WmNewsConstants.NEWS_PUBLISH_TOPIC, JSON.toJSONString(task));
            taskinfoMapper.deleteById(task.getTaskId());
            taskinfoLogs.setStatus(TaskinfoLogs.EXECUTED);
            taskinfoLogsMapper.updateById(taskinfoLogs);
        } else if (executeTime <= presetTime) {
            // Due within the window: stage in redis, scored by the REAL execute
            // time (not the window end), so refresh() fires it at the right moment.
            String key = TASK + task.getTaskType();
            redisTemplate.opsForZSet().add(key, JSON.toJSONString(task), executeTime);
        }
        // Far-future tasks are intentionally NOT put in redis here.
        return task.getTaskId();
    }

    /**
     * Sync DB tasks into Redis (runs every 5 minutes).
     * <p>
     * Loads every task whose execute time falls before "now + 5 minutes" and
     * stages it in the per-type zset. Re-staging an already-present task is
     * harmless: ZADD of an identical member only updates its score.
     * Guarded by a 20s Redis lock so only one instance performs the sync.
     */
    @Scheduled(cron = "0 0/5 * * * ?")
    public void reloadData() {
        // 1. distributed lock — setIfAbsent may return null on some drivers,
        // so compare null-safely instead of unboxing.
        Boolean flag = redisTemplate.opsForValue().setIfAbsent(RELOAD_LOCK, "1", 20, TimeUnit.SECONDS);
        if (!Boolean.TRUE.equals(flag)) {
            log.info("锁已被占用，等待下次执行");
            return;
        }
        try {
            // 2. window end: 5 minutes from now
            Calendar calendar = Calendar.getInstance();
            calendar.add(Calendar.MINUTE, 5);
            // query all tasks due before the window end
            List<Taskinfo> taskinfoList = taskinfoMapper.selectList(
                    Wrappers.<Taskinfo>lambdaQuery().lt(Taskinfo::getExecuteTime, calendar.getTime()));
            if (!CollectionUtils.isEmpty(taskinfoList)) {
                for (Taskinfo taskinfo : taskinfoList) {
                    Task task = new Task();
                    BeanUtils.copyProperties(taskinfo, task);
                    String key = TASK + task.getTaskType();
                    redisTemplate.opsForZSet().add(key, JSON.toJSONString(task), task.getExecuteTime().getTime());
                }
            }
            log.info("数据库任务已经同步到了缓存中");
        } finally {
            // 3. always release the lock, even if the sync throws.
            // NOTE(review): releasing without an ownership token can delete a
            // competitor's lock if this run outlives the 20s TTL — consider a
            // unique value + compare-and-delete.
            redisTemplate.delete(RELOAD_LOCK);
        }
    }

    /**
     * Scan Redis for due tasks and dispatch them to Kafka (runs every 30s).
     * <p>
     * For every {@code TASK:*} zset, members scored at or before "now" are
     * published, removed from the zset, deleted from the DB, and their log row
     * is flipped to EXECUTED. Guarded by a 20s Redis lock.
     */
    @Scheduled(cron = "0/30 * * * * ?")
    public void refresh() {
        // 1. distributed lock (null-safe, see reloadData)
        Boolean flag = redisTemplate.opsForValue().setIfAbsent(REFRESH_LOCK, "1", 20, TimeUnit.SECONDS);
        if (!Boolean.TRUE.equals(flag)) {
            log.info("锁已被占用，等待下次执行");
            return;
        }
        try {
            // 2. collect all per-type zset keys.
            // NOTE(review): KEYS is O(N) and blocks Redis; prefer SCAN in production.
            Set<String> futureKeys = redisTemplate.opsForZSet().getOperations().keys(TASK + "*");
            if (CollectionUtils.isEmpty(futureKeys)) {
                return;
            }
            for (String futureKey : futureKeys) {
                // members whose score (execute time) is <= now are due
                Set<String> tasks = redisTemplate.opsForZSet().rangeByScore(futureKey, 0, System.currentTimeMillis());
                if (CollectionUtils.isEmpty(tasks)) {
                    log.info("当前KEY：{}没有需要执行的任务", futureKey);
                    continue;
                }
                log.info("key:{}需要执行的任务:{}", futureKey, tasks);
                for (String taskStr : tasks) {
                    Task task = JSON.parseObject(taskStr, Task.class);
                    kafkaTemplate.send(WmNewsConstants.NEWS_PUBLISH_TOPIC, taskStr);
                    // Remove ONLY this member. Deleting the whole key here would
                    // throw away every not-yet-due task of the same type.
                    redisTemplate.opsForZSet().remove(futureKey, taskStr);
                    taskinfoMapper.deleteById(task.getTaskId());
                    // The log row already exists (created by addTask); update its
                    // status rather than inserting a duplicate-PK row.
                    TaskinfoLogs taskinfoLogs = new TaskinfoLogs();
                    BeanUtils.copyProperties(task, taskinfoLogs);
                    taskinfoLogs.setStatus(TaskinfoLogs.EXECUTED);
                    taskinfoLogsMapper.updateById(taskinfoLogs);
                }
            }
        } finally {
            // 3. always release the lock (same ownership caveat as reloadData)
            redisTemplate.delete(REFRESH_LOCK);
        }
    }


}
