package com.qf.leadnewsschedule.service.impl;

import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.StringUtils;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.qf.leadnewsmodel.consts.TaskConst;
import com.qf.leadnewsmodel.dtos.ResponseResult;
import com.qf.leadnewsmodel.enums.AppHttpCodeEnum;
import com.qf.leadnewsschedule.mapper.TaskInfoMapper;
import com.qf.leadnewsmodel.pojos.schedule.Task;
import com.qf.leadnewsschedule.model.pojos.Taskinfo;
import com.qf.leadnewsschedule.model.pojos.TaskinfoLogs;
import com.qf.leadnewsschedule.service.RedisService;
import com.qf.leadnewsschedule.service.TaskinfoLogsService;
import com.qf.leadnewsschedule.service.TaskinfoService;
import com.qf.leadnewsutils.commons.ProtostuffUtil;
import lombok.extern.slf4j.Slf4j;
import org.redisson.api.RLock;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

@Service
@Slf4j
public class TaskinfoServiceImpl extends ServiceImpl<TaskInfoMapper, Taskinfo> implements TaskinfoService {

    /**
     * Self-injection: Spring injects the transactional PROXY of this service.
     * {@link #add} must call {@link #save2Db} through this proxy — a plain
     * internal call ({@code this.save2Db(...)}) would bypass the proxy and
     * silently disable {@code @Transactional} on that method.
     */
    @Autowired
    private TaskinfoService taskinfoService;

    @Autowired
    private TaskinfoLogsService taskinfoLogsService;

    @Autowired
    private RedisService redisService;

    /**
     * Schedules a task: persists it to the database and, when it is due within
     * the next five minutes, also pushes it into Redis for (near-)immediate
     * consumption.
     *
     * @param task the task to schedule
     * @return ok result carrying the saved {@link Taskinfo},
     *         or {@code TASK_ADD_FAIL} when the DB insert did not succeed
     */
    @Override
    public ResponseResult add(Task task) {
        // Go through the injected proxy so @Transactional on save2Db applies.
        Taskinfo taskinfo = taskinfoService.save2Db(task);

        if (taskinfo != null) {
            // Deliberately OUTSIDE the DB transaction: a Redis failure must
            // not roll back the already-persisted task.
            save2Redis(taskinfo);
            return ResponseResult.okResult(taskinfo);
        }

        return ResponseResult.errorResult(AppHttpCodeEnum.TASK_ADD_FAIL);
    }

    /**
     * Pushes a task into Redis when it is due within the next five minutes.
     * <ul>
     *   <li>due now (executeTime &lt;= current time): LPUSH onto the real-time
     *       consume List, keyed by task type + priority;</li>
     *   <li>due within 5 minutes: ZADD into the pre-heat ZSet, scored by its
     *       execution timestamp so {@link #zSet2List} can migrate it on time;</li>
     *   <li>due later than 5 minutes: not cached — {@link #db2Redis} will pick
     *       it up from the database on a later run.</li>
     * </ul>
     *
     * @param taskinfo the persisted task (JSON-serialized as the Redis value)
     */
    private void save2Redis(Taskinfo taskinfo) {
        Date executeTime = taskinfo.getExecuteTime();

        // Upper bound of the cache window: now + 5 minutes.
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.MINUTE, 5);
        Date futureTime = calendar.getTime();

        long time = executeTime.getTime();
        if (time <= futureTime.getTime()) {
            // Key layout: <prefix><taskType>_<priority>; value is the whole
            // Taskinfo as JSON so the consumer can restore it losslessly.
            String keySuffix = buildSuffix(taskinfo.getTaskType(), taskinfo.getPriority());
            String paramStr = JSON.toJSONString(taskinfo);

            long currentTime = System.currentTimeMillis();
            if (time <= currentTime) {
                // Already due: real-time consume queue (List).
                redisService.save2List(TaskConst.NOW_CONSUME_PREFIX + keySuffix, paramStr);
            } else {
                // Due within 5 minutes: pre-heat ZSet ordered by execute time.
                redisService.save2ZSet(TaskConst.FUTRUE_CONSUME_PREFIX + keySuffix, time, paramStr);
            }
        }
    }

    /**
     * Persists a task to the {@code taskinfo} table and writes the matching
     * INIT-status row into the {@code taskinfo_logs} table, atomically.
     * <p>Must be invoked through the Spring proxy (see {@link #taskinfoService})
     * for the transaction to take effect.
     *
     * @param task incoming task; its {@code executeTime} is a long epoch-millis
     *             and is converted to {@link Date} for the entity
     * @return the persisted {@link Taskinfo} (id populated by the insert)
     */
    @Transactional
    public Taskinfo save2Db(Task task) {
        Taskinfo taskinfo = new Taskinfo();
        BeanUtils.copyProperties(task, taskinfo);
        // executeTime types differ (long vs Date), so copyProperties skips it.
        taskinfo.setExecuteTime(new Date(task.getExecuteTime()));
        save(taskinfo);

        // Copy from taskinfo (not task): only taskinfo carries the generated id.
        TaskinfoLogs taskinfoLogs = new TaskinfoLogs();
        BeanUtils.copyProperties(taskinfo, taskinfoLogs);
        taskinfoLogs.setStatus(TaskConst.INIT_STATUS);
        taskinfoLogsService.save(taskinfoLogs);

        return taskinfo;
    }

    /**
     * Pops one due task of the given type/priority from the real-time Redis
     * List, deletes it from the database, marks its log row EXECUTED, and
     * returns the task's deserialized parameter string.
     *
     * @param taskType task type discriminator (part of the Redis key)
     * @param priority task priority (part of the Redis key)
     * @return ok result carrying the task's parameter string, or {@code null}
     *         when no task is currently available (kept for caller compatibility)
     */
    @Override
    @Transactional
    public ResponseResult consumeTask(int taskType, int priority) {
        String keySuffix = buildSuffix(taskType, priority);
        String key = TaskConst.NOW_CONSUME_PREFIX + keySuffix;

        // Read lock: consumers may pop concurrently, but never while db2Redis
        // holds the write lock and rebuilds the cache.
        RLock lock = redisService.getReadLock(TaskConst.TASK_MOVE_READ_WRITE_LOCK);
        // Acquire BEFORE the try block so a failed acquisition never reaches
        // the unlock in finally. 20 s lease auto-releases if we crash.
        lock.lock(20, TimeUnit.SECONDS);
        try {
            String taskInfoStr = redisService.lRightPop(key);

            if (StringUtils.isNotBlank(taskInfoStr)) {
                Taskinfo taskinfo = JSON.parseObject(taskInfoStr, Taskinfo.class);
                // Consumed: remove from the schedule table...
                removeById(taskinfo.getTaskId());

                // ...and flip the matching log row to EXECUTED.
                TaskinfoLogs taskinfoLogs = new TaskinfoLogs();
                BeanUtils.copyProperties(taskinfo, taskinfoLogs);
                taskinfoLogs.setStatus(TaskConst.EXECUTED_STATUS);
                taskinfoLogsService.updateById(taskinfoLogs);

                // parameters were Protostuff-serialized by the producer.
                byte[] parameters = taskinfo.getParameters();
                String taskParamStr = ProtostuffUtil.deserialize(parameters, String.class);
                return ResponseResult.okResult(taskParamStr);
            }
        } finally {
            // The 20 s lease may have expired mid-work; a blind unlock() would
            // then throw IllegalMonitorStateException.
            if (lock.isHeldByCurrentThread()) {
                lock.unlock();
            }
        }

        return null;
    }

    /**
     * Builds the shared Redis key suffix: {@code <taskType>_<priority>}.
     */
    private String buildSuffix(int taskType, int priority) {
        return taskType + "_" + priority;
    }

    /**
     * Every 5 seconds, migrates entries whose score (execution timestamp) is
     * already due — score in [0, now] — from each pre-heat ZSet into the
     * corresponding real-time consume List, using a Redis pipeline.
     */
    @Scheduled(cron = "0/5 * * * * ? ")
    public void zSet2List() {
        log.info("===========调度任务执行:" + LocalDateTime.now());

        // SCAN (not KEYS) to avoid blocking Redis on large keyspaces.
        Set<String> keys = redisService.scan(TaskConst.FUTRUE_CONSUME_PREFIX + "*", 1000);

        for (String futureKey : keys) {
            // e.g. FUTRUE:CONSUME:1001_1  ->  NOW:CONSUME:1001_1

            // Members with score <= now are due for migration.
            Set<String> values = redisService.zRangeByScore(futureKey, 0, System.currentTimeMillis());

            if (values != null && !values.isEmpty()) {
                // Pipelined remove-from-ZSet + push-to-List for efficiency.
                redisService.pipelineRefresh(futureKey, values);
            }
        }
    }

    /**
     * Every 5 minutes, rebuilds the Redis cache from the database: loads all
     * tasks due within the next five minutes, clears the old cache entries,
     * and re-caches the fresh set via {@link #save2Redis}.
     * <p>The whole query + clear + rebuild sequence runs under the WRITE lock:
     * releasing it after the query only would let a consumer pop and DB-delete
     * a task in between, after which the rebuild would resurrect the
     * already-consumed task in Redis and cause duplicate execution.
     */
    @Scheduled(cron = "0 0/5 * * * ? ")
    public void db2Redis() {
        // Window upper bound: now + 5 minutes.
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.MINUTE, 5);
        Date futureTime = calendar.getTime();

        LambdaQueryWrapper<Taskinfo> wq = new LambdaQueryWrapper<>();
        wq.le(Taskinfo::getExecuteTime, futureTime);

        // Write lock blocks all consumers while the cache is rebuilt.
        RLock lock = redisService.getWriteLock(TaskConst.TASK_MOVE_READ_WRITE_LOCK);
        lock.lock(5, TimeUnit.MINUTES);
        try {
            List<Taskinfo> taskinfos = list(wq);

            // Drop stale cache entries, then repopulate — still under the lock.
            redisService.clearDataByKey();
            if (taskinfos != null) {
                for (Taskinfo taskinfo : taskinfos) {
                    save2Redis(taskinfo);
                }
            }
        } finally {
            // Lease may have expired on a very slow rebuild; guard the unlock.
            if (lock.isHeldByCurrentThread()) {
                lock.unlock();
            }
        }
    }
}
