package com.heima.schedule.service.Impl;

import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.codahale.metrics.Gauge;
import com.heima.common.constants.ScheduleConstants;
import com.heima.common.redis.CacheService;
import com.heima.model.schedule.dto.Task;
import com.heima.model.schedule.pojos.Taskinfo;
import com.heima.model.schedule.pojos.TaskinfoLogs;
import com.heima.schedule.mapper.TaskinfoLogsMapper;
import com.heima.schedule.mapper.TaskinfoMapper;
import com.heima.schedule.service.TaskService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.codehaus.jackson.map.util.BeanUtil;
import org.springframework.aop.framework.AopContext;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeansException;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.RedisStringCommands;
import org.springframework.data.redis.connection.StringRedisConnection;
import org.springframework.data.redis.core.*;
import org.springframework.data.redis.core.types.Expiration;
import org.springframework.lang.Nullable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.io.IOException;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

@Slf4j
@Service
public class TaskServiceImpl implements TaskService {
    @Resource
    private TaskinfoMapper taskinfoMapper;
    @Resource
    private TaskinfoLogsMapper taskinfoLogsMapper;
    @Resource
    private CacheService cacheService;
    @Resource
    private StringRedisTemplate stringRedisTemplate;
    // NOTE(review): self-injected proxy, currently unused — all self-invocations go
    // through AopContext.currentProxy() instead. Confirm nothing wires this before removing.
    @Resource
    private TaskService taskService;

    /**
     * Adds a task: persists it to the database first and, only on success,
     * mirrors it into Redis (immediate list or future ZSet).
     *
     * @param task task to add; its taskId is populated by the DB insert
     * @return the generated task id
     */
    @Override
    public long addTask(Task task) {
        // A plain this.addTaskToDB(...) would bypass the transactional proxy
        // (self-invocation), so call through the exposed AOP proxy instead.
        boolean saved = ((TaskService) AopContext.currentProxy()).addTaskToDB(task);
        if (saved) {
            addTaskToRedis(task);
        }
        return task.getTaskId();
    }

    /**
     * Routes a task into Redis.
     * <ul>
     *   <li>Already due: pushed onto the consumer list (TOPIC prefix).</li>
     *   <li>Due within the next 5 minutes: added to the future ZSet (FUTURE prefix),
     *       scored by its execute time.</li>
     *   <li>Due later: kept in the DB only; {@link #reloadData()} will pick it up.</li>
     * </ul>
     */
    public void addTaskToRedis(Task task) {
        // Epoch millis for "now + 5 minutes" — the look-ahead window for the future ZSet.
        long fiveMinutesLater = Instant.now().plusSeconds(300).toEpochMilli();
        String key = task.getTaskType() + "_" + task.getPriority();
        if (task.getExecuteTime() <= System.currentTimeMillis()) {
            stringRedisTemplate.opsForList().leftPush(ScheduleConstants.TOPIC + key, JSON.toJSONString(task));
        } else if (task.getExecuteTime() <= fiveMinutesLater) {
            stringRedisTemplate.opsForZSet().add(ScheduleConstants.FUTURE + key, JSON.toJSONString(task), task.getExecuteTime());
        }
    }

    /**
     * Cancels a task: removes it from the DB, marks its log CANCELLED,
     * then removes it from Redis.
     *
     * @param taskId id of the task to cancel; must be positive
     * @return true if the task was found and cancelled
     */
    @Override
    public boolean cancelTask(long taskId) {
        if (taskId <= 0) {
            log.error("任务id不能小于0");
            return false;
        }
        Task task;
        try {
            // Through the proxy so @Transactional on updateDB is honored.
            task = ((TaskService) AopContext.currentProxy()).updateDB(taskId, ScheduleConstants.CANCELLED);
        } catch (Exception e) {
            throw new RuntimeException("删除日志失败", e);
        }
        if (task == null) {
            return false;
        }
        task = task;
        removeTaskFromRedis(task);
        return true;
    }

    /**
     * Removes a task from Redis: from the consumer list if it was already due,
     * otherwise from the future ZSet.
     */
    private void removeTaskFromRedis(Task task) {
        String key = task.getTaskType() + "_" + task.getPriority();
        if (task.getExecuteTime() <= System.currentTimeMillis()) {
            // List remove count semantics: 0 deletes ALL elements equal to the value.
            stringRedisTemplate.opsForList().remove(ScheduleConstants.TOPIC + key, 0, JSON.toJSONString(task));
        } else {
            // FIX: ZSet remove takes only member values (varargs) — the original passed a
            // stray 0, which was treated as an extra member "0" to remove, not a count.
            stringRedisTemplate.opsForZSet().remove(ScheduleConstants.FUTURE + key, JSON.toJSONString(task));
        }
    }

    /**
     * Deletes the task row and updates its log entry to the given status.
     *
     * @param taskId task id
     * @param status new status for the task log (e.g. CANCELLED, EXECUTED)
     * @return the task reconstructed from its log, or null if no log row exists
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public Task updateDB(long taskId, int status) {
        try {
            taskinfoMapper.deleteById(taskId);
            log.info("删除任务成功");
            TaskinfoLogs taskinfoLogs = taskinfoLogsMapper.selectById(taskId);
            if (taskinfoLogs == null) {
                // FIX: original dereferenced a null log row and threw an NPE;
                // callers (cancelTask/poll) already treat a null return as "not found".
                log.warn("task log not found, taskId={}", taskId);
                return null;
            }
            taskinfoLogs.setStatus(status);
            taskinfoLogsMapper.updateById(taskinfoLogs);
            log.info("更新任务日志成功");
            Task task = new Task();
            BeanUtils.copyProperties(taskinfoLogs, task);
            // executeTime is a Date on the log entity but epoch millis on the DTO.
            task.setExecuteTime(taskinfoLogs.getExecuteTime().getTime());
            return task;
        } catch (Exception e) {
            log.error("更新任务失败", e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Persists a task and its initial SCHEDULED log entry in one transaction.
     *
     * @param task task to store; its taskId is set from the generated key
     * @return true on success (an exception rolls back and propagates otherwise)
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public boolean addTaskToDB(Task task) {
        try {
            // Task table row; DTO carries epoch millis, entity wants a Date.
            Taskinfo taskinfo = new Taskinfo();
            BeanUtils.copyProperties(task, taskinfo);
            taskinfo.setExecuteTime(new Date(task.getExecuteTime()));
            taskinfoMapper.insert(taskinfo);
            task.setTaskId(taskinfo.getTaskId());
            // Matching log row, versioned and marked SCHEDULED.
            TaskinfoLogs taskinfoLogs = new TaskinfoLogs();
            BeanUtils.copyProperties(taskinfo, taskinfoLogs);
            taskinfoLogs.setVersion(ScheduleConstants.VERSION);
            taskinfoLogs.setStatus(ScheduleConstants.SCHEDULED);
            taskinfoLogsMapper.insert(taskinfoLogs);
            return true;
        } catch (Exception e) {
            log.error(ScheduleConstants.ADDTODB, e);
            throw new RuntimeException(ScheduleConstants.ADDTODB, e);
        }
    }

    /**
     * Pops one due task of the given type/priority from the consumer list and
     * marks it EXECUTED in the DB.
     *
     * @param type     task type
     * @param priority task priority
     * @return the popped task, or null if the list was empty
     */
    @Override
    public Task poll(int type, int priority) {
        Task task = null;
        try {
            String key = type + "_" + priority;
            String taskJson = stringRedisTemplate.opsForList().rightPop(ScheduleConstants.TOPIC + key);
            // FIX: isNotBlank is the single-argument API; isNoneBlank is for varargs.
            if (StringUtils.isNotBlank(taskJson)) {
                task = JSON.parseObject(taskJson, Task.class);
                if (task == null) throw new RuntimeException("任务为空");
                ((TaskService) AopContext.currentProxy()).updateDB(task.getTaskId(), ScheduleConstants.EXECUTED);
            }
        } catch (Exception e) {
            log.error("获取任务失败", e);
            throw new RuntimeException(e);
        }
        return task;
    }

    /**
     * Best-effort distributed lock via SET NX PX. The lock is never explicitly
     * released here; it expires after {@code expire} ms.
     *
     * @param name   lock name (suffixed with "_lock")
     * @param expire time-to-live in milliseconds
     * @return the owner token if the lock was acquired, null otherwise
     */
    public String tryLock(String name, long expire) {
        name = name + "_lock";
        // FIX: UUID strings contain hyphens, not underscores — the original
        // replace("_","") was a no-op; the intent was to strip the hyphens.
        String token = UUID.randomUUID().toString().replace("-", "");
        RedisConnectionFactory connectionFactory = stringRedisTemplate.getConnectionFactory();
        RedisConnection redisConnection = connectionFactory.getConnection();
        try {
            Boolean acquired = redisConnection.set(
                    name.getBytes(),
                    token.getBytes(),
                    Expiration.from(expire, TimeUnit.MILLISECONDS),
                    RedisStringCommands.SetOption.SET_IF_ABSENT
            );
            if (acquired != null && acquired) {
                return token;
            }
            return null;
        } finally {
            RedisConnectionUtils.releaseConnection(redisConnection, connectionFactory, false);
        }
    }

    /**
     * Non-blocking key scan (SCAN, not KEYS) for the given pattern.
     *
     * @param patten glob-style match pattern
     * @return matching keys (possibly empty)
     */
    public Set<String> scan(String patten) {
        return stringRedisTemplate.execute((RedisCallback<Set<String>>) connection -> {
            Set<String> result = new HashSet<>();
            try (Cursor<byte[]> cursor = connection.scan(new ScanOptions.ScanOptionsBuilder()
                    .match(patten).count(10000).build())) {
                while (cursor.hasNext()) {
                    result.add(new String(cursor.next()));
                }
            } catch (IOException e) {
                // FIX: log through SLF4J instead of printStackTrace.
                log.error("redis scan cursor close failed", e);
            }
            return result;
        });
    }

    /**
     * Every minute, moves tasks whose execute time has arrived from each future
     * ZSet into its matching consumer list. Guarded by a 30s distributed lock so
     * only one instance performs the refresh.
     */
    @Scheduled(cron = "0 */1 * * * ?")
    public void refresh() {
        String token = tryLock("FUTURE_TASK_SYNC", 1000 * 30);
        if (StringUtils.isNotBlank(token)) {
            log.info("未来数据定时刷新---定时任务");

            // All future ZSet keys, e.g. future_100_50.
            Set<String> futureKeys = scan(ScheduleConstants.FUTURE + "*");
            for (String futureKey : futureKeys) {
                // Matching consumer list key: topic_<type>_<priority>.
                String topicKey = ScheduleConstants.TOPIC + futureKey.split(ScheduleConstants.FUTURE)[1];

                // Members whose score (execute time) is already due.
                Set<String> tasks = stringRedisTemplate.opsForZSet().rangeByScore(futureKey, 0, System.currentTimeMillis());

                // FIX: rangeByScore may return null (e.g. inside a pipeline/tx);
                // guard before dereferencing.
                if (tasks != null && !tasks.isEmpty()) {
                    refreshWithPipeline(futureKey, topicKey, tasks);
                    log.info("成功的将{}刷新到了{}", futureKey, topicKey);
                }
            }
        }
    }

    /**
     * Atomically-batched move: pushes the values onto the consumer list and
     * removes them from the future ZSet in a single pipelined round trip.
     *
     * @param future_key source ZSet key
     * @param topic_key  destination list key
     * @param values     serialized tasks to move
     * @return pipelined command results
     */
    public List<Object> refreshWithPipeline(String future_key, String topic_key, Collection<String> values) {
        return stringRedisTemplate.executePipelined((RedisCallback<Object>) redisConnection -> {
            StringRedisConnection stringRedisConnection = (StringRedisConnection) redisConnection;
            String[] members = values.toArray(new String[0]);
            stringRedisConnection.rPush(topic_key, members);
            stringRedisConnection.zRem(future_key, members);
            return null;
        });
    }

    /**
     * Rebuilds the Redis cache from the DB: clears the existing task keys, then
     * reloads every task due within the next 5 minutes. Runs at startup
     * (@PostConstruct) and every 5 minutes thereafter.
     */
    @Scheduled(cron = "0 */5 * * * ?")
    @PostConstruct
    public void reloadData() {
        clearCache();
        log.info("数据库数据同步到缓存");
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.MINUTE, 5);

        // All tasks whose execute time falls before "now + 5 minutes".
        List<Taskinfo> allTasks = taskinfoMapper.selectList(Wrappers.<Taskinfo>lambdaQuery()
                .lt(Taskinfo::getExecuteTime, calendar.getTime()));
        if (allTasks != null && !allTasks.isEmpty()) {
            for (Taskinfo taskinfo : allTasks) {
                Task task = new Task();
                BeanUtils.copyProperties(taskinfo, task);
                task.setExecuteTime(taskinfo.getExecuteTime().getTime());
                addTaskToRedis(task);
            }
        }
    }

    /** Deletes every future ZSet and consumer list key from Redis. */
    private void clearCache() {
        Set<String> futurekeys = scan(ScheduleConstants.FUTURE + "*"); // future_*
        Set<String> topickeys = scan(ScheduleConstants.TOPIC + "*");   // topic_*
        stringRedisTemplate.delete(futurekeys);
        stringRedisTemplate.delete(topickeys);
    }

}
