package com.xinqi.service.impl;

import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.xinqi.common.RedisContents;
import com.xinqi.common.SystemContents;
import com.xinqi.dto.Result;
import com.xinqi.entity.Blog;
import com.xinqi.entity.LikedIdTime;
import com.xinqi.entity.User;
import com.xinqi.mapper.BlogMapper;
import com.xinqi.service.BlogService;
import com.xinqi.service.UserService;
import com.xinqi.util.ThreadPoolUtils;
import com.xinqi.util.sqlsession.SingleSqlSession;
import com.xinqi.util.sqlsession.mappersqlsession.BlogMapperSqlSessionUtil;
import com.xinqi.util.sqlsession.sqlsessionarray.SqlSessionArray;
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.*;
import org.springframework.stereotype.Service;
import redis.clients.jedis.*;

import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors;

@Slf4j
@Service
public class BlogServiceImpl extends ServiceImpl<BlogMapper, Blog> implements BlogService {
    @Autowired
    StringRedisTemplate stringRedisTemplate;

    @Override
    public List<Blog> getPageByLimited(Page<Blog> pageParam) {
        // Delegate paging to MyBatis-Plus (no extra query conditions) and unwrap the records.
        return baseMapper.selectPage(pageParam, null).getRecords();
    }

    /**
     * Returns the number of likes for the given blog, counted from the Redis like zset.
     *
     * @param id blog id
     * @return like count (0 when the key does not exist)
     */
    @Override
    public Integer queryBlogLikesNumber(Long id) {
        // NOTE(review): key literal should match RedisContents.BLOG_LIKED_KEY used in the
        // batch-flush methods — confirm they are the same string.
        String key = "BLOG_LIKED_KEY" + id;
        // Use ZCARD (O(1)) instead of fetching the whole zset just to call size() on it.
        Long likeNumber = stringRedisTemplate.opsForZSet().size(key);
        return likeNumber == null ? 0 : likeNumber.intValue();
    }// persistence of this counter is handled by a scheduled task

    @Override
    public Result likeBlog(Long blogId, Long userId) {
        String key = "BLOG_LIKED_KEY" + blogId;
        String member = userId.toString();
        ZSetOperations<String, String> zSetOps = stringRedisTemplate.opsForZSet();
        // A non-null score means this user has already liked the blog.
        if (zSetOps.score(key, member) == null) {
            // Not liked yet: record the like, scoring the member with the current timestamp.
            zSetOps.add(key, member, System.currentTimeMillis());
        } else {
            // Already liked: remove the member to "un-like".
            zSetOps.remove(key, member);
        }
        return Result.ok();
    }

    // Look up, in Redis, the ids of every user who liked the given blog.
    @Override
    public List<Long> queryAllBlogLikesUserIds(Long id) {
        return queryAllBlogLikesUserIdsByRedisKey("BLOG_LIKED_KEY" + id);
    }

    // Reads all members of the like zset at the given key and parses them into user ids.
    private List<Long> queryAllBlogLikesUserIdsByRedisKey(String key) {
        Set<String> members = stringRedisTemplate.opsForZSet().range(key, 0, -1);
        if (members == null || members.isEmpty()) {
            return Collections.emptyList();
        }
        List<Long> ids = new ArrayList<>(members.size());
        for (String member : members) {
            ids.add(Long.valueOf(member));
        }
        return ids;
    }

    @Autowired
    private UserService userService;

    // Look up, in Redis, all users who liked this blog, preserving the like-time order.
    @Override
    public List<User> getAllBlogLikesUsersByBlogId(Long id) {
        List<Long> ids = queryAllBlogLikesUserIds(id);
        // Fix: with no likers the old code built "IN ()" / "ORDER BY FIELD(id,)" — invalid SQL.
        if (ids.isEmpty()) return Collections.emptyList();

        String idStr = StrUtil.join(",", ids);
        // Query users by id, keeping the zset order: WHERE id IN (5, 1) ORDER BY FIELD(id, 5, 1).
        // ids are parsed Longs from Redis, so concatenating them into FIELD() is numeric-only.
        return new ArrayList<>(userService.query()
                .in("id", ids).last("ORDER BY FIELD(id," + idStr + ")")
                .list());
    }

    // Reads a blog's like list from Redis and returns it as Map<userId, likeTime>.
    @Override
    public Map<String, String> queryAllBlogLikesUserIdWithScores(Long id) {
        return queryAllBlogLikesUserIdWithScoresByRedisKey("BLOG_LIKED_KEY" + id);
    }

    /**
     * For a key like BLOG_LIKED_KEY1, returns {userId -> likeTime}; the map's key/value
     * correspond to the zset's member/score.
     */
    private Map<String, String> queryAllBlogLikesUserIdWithScoresByRedisKey(String key) {
        // Fix: fetch members and scores in ONE round-trip (rangeWithScores) instead of the
        // old N+1 pattern that issued a ZSCORE per member; also drops the unused ids list.
        Set<ZSetOperations.TypedTuple<String>> tuples =
                stringRedisTemplate.opsForZSet().rangeWithScores(key, 0, -1);
        if (tuples == null || tuples.isEmpty()) return new HashMap<>();

        HashMap<String, String> userIdWithTime = new HashMap<>();
        for (ZSetOperations.TypedTuple<String> tuple : tuples) {
            Double score = tuple.getScore();
            // Skip incomplete tuples instead of NPE-ing on score.toString().
            if (tuple.getValue() == null || score == null) continue;
            // Double.toString keeps the same textual format the old score().toString() produced.
            userIdWithTime.put(tuple.getValue(), score.toString());
        }
        return userIdWithTime;
    }

    // Persist one blog's Redis like list to the database.
    @Override
    public void durableForLikeListByBogId(Long id) {
        // Pull the full like list (userId -> likeTime) for this blog out of Redis.
        Map<String, String> userIdWithTime = queryAllBlogLikesUserIdWithScores(id);
        // Nothing in Redis means there is nothing to persist.
        if (userIdWithTime == null || userIdWithTime.isEmpty()) {
            return;
        }
        updateLikeListByBlogId(id, userIdWithTime);
    }


    // Load the persisted like list for a blog from the database's like_list column.
    @Override// loads data
    public HashMap<String, String> getIdsInTableByBlogId(Long id) {
        Blog blog = getById(id);
        // Fix: guard against a missing row or empty like_list column (previously NPE'd).
        if (blog == null || StrUtil.isBlank(blog.getLikeList())) return new HashMap<>();
        // NOTE(review): like_list is written elsewhere as Map.toString() ("{1=123}"), which
        // is not strict JSON — confirm JSONUtil tolerates that format.
        @SuppressWarnings("unchecked")
        HashMap<String, String> hashMap = (HashMap<String, String>) JSONUtil.toBean(blog.getLikeList(), Map.class);
        return hashMap;
    }


    // NOTE(review): the updateTime parameter is immediately overwritten below, so the
    // caller-supplied value is never used — it effectively behaves as a local variable.
    private void process(List<Long> updateTime,
                         Long lastUpdateTime,
                         Pipeline pipe,
                         ArrayList<String> blogIds) {
        // The pipeline already holds queued GETs for per-blog update-time keys; flushing it
        // returns each blog's last like-update timestamp, positionally aligned with blogIds.
        updateTime = JSONUtil.toList(
                JSONUtil.toJsonStr(pipe.syncAndReturnAll()),
                Long.class);
        log.debug("updateTime is {}", updateTime);
        //->
        ArrayList<String> targetBlogIds = new ArrayList<>();// blogs whose like data must be flushed to the DB
        pipe.clear();// reset the pipeline before queueing the next batch of commands
        for (int index = 0; index < updateTime.size(); index++) {
            Long time = updateTime.get(index);
            // Skip blogs untouched since the last batch run.
            if (time <= lastUpdateTime) continue;
            String blogId = blogIds.get(index);
            pipe.zrangeWithScores(RedisContents.BLOG_LIKED_KEY + blogId, 0, -1);
            targetBlogIds.add(blogId);// this is a blogId
        }
        // Flush the queued ZRANGEs and convert raw responses to {blogId -> {userId -> likeTime}}.
        Map<Long, Map<String, String>> maps = pipeLineResponseTransformedToMap2(pipe.syncAndReturnAll(), targetBlogIds);
        if (maps == null || maps.size() == 0) return;
        //<-
        for (Map.Entry<Long, Map<String, String>> entry : maps.entrySet()) {
            Long blogId = entry.getKey();
            Map<String, String> likeList = entry.getValue();
            updateLikeListByBlogId(blogId, likeList);// persist each blog's like list to the DB
        }
        for (String targetBlogId : targetBlogIds) {// refresh each flushed blog's last-update timestamp
            stringRedisTemplate.opsForValue().set(RedisContents.BLOG_LIKES_KEY_UPDATE_TIME + targetBlogId, String.valueOf(System.currentTimeMillis()));
        }//TODO: could use list.size() to avoid re-concatenating the key
    }


    /**
     * Batch-flushes every changed blog's Redis like list to the database (V5).
     * Scans keys matching {@code prefix*} (per-blog update-time keys), pipelines GETs for
     * their timestamps, and delegates each scan batch to {@link #processV5}, which submits
     * the DB writes to a worker pool.
     *
     * @param prefix update-time key prefix, e.g. BLOG:LIKES:KEY:UPDATE:TIME:
     * @return Result.ok() on success, Result.fail("failed") on any exception
     */
    @Override//TODO: consider a MyBatis XML batch statement here
    public Result updateAllLikeListToDatabaseByPipelineV5(String prefix) {
        Jedis jedis = null;
        BlogMapperSqlSessionUtil sessionUtil = null;
        ThreadPoolExecutor executor = null;
        Pipeline pipe = null;
        try {
            jedis = jedisPool.getResource();
            pipe = jedis.pipelined();
            String cursor = "0";
            ArrayList<String> blogIds = new ArrayList<>(RedisContents.PIPELINE_EXECUTE_COUNT);
            ScanParams scanParams = new ScanParams();
            scanParams.match(prefix.concat("*"));// match keys starting with the given prefix
            scanParams.count(RedisContents.SCAN_BIGCOUNT_PIPELINE);
            // Timestamp of the previous batch run; blogs untouched since then are skipped.
            String lastBatchUpdateTime = stringRedisTemplate.opsForValue().get(RedisContents.BATCH_UPDATE_TIME);
            if (lastBatchUpdateTime == null) lastBatchUpdateTime = "0";
            Long lastUpdateTime = Long.valueOf(lastBatchUpdateTime);
            sessionUtil = new BlogMapperSqlSessionUtil();
            ArrayList<Future<Boolean>> futures = new ArrayList<>();
            executor = ThreadPoolUtils.getThreadPoolWithCallerRunsPolicy(SystemContents.CPU_COUNT, SystemContents.CPU_COUNT);

            do {
                // Scan one batch of keys.
                ScanResult<String> result = jedis.scan(cursor, scanParams);
                // Remember the cursor for the next round.
                cursor = result.getCursor();
                List<String> list = result.getResult();
                if (list == null || list.isEmpty()) break;
                // Keys look like BLOG:LIKES:KEY:UPDATE:TIME:8997; the suffix is the blog id.
                for (String key : list) {
                    pipe.get(key);
                    blogIds.add(key.substring(prefix.length(), key.length()));
                }
                processV5(lastUpdateTime, pipe, blogIds, sessionUtil, executor, futures);
                blogIds.clear();
            } while (!cursor.equals("0"));

            stringRedisTemplate.opsForValue().set(RedisContents.BATCH_UPDATE_TIME, String.valueOf(System.currentTimeMillis()));
            // Wait for all in-flight DB-update tasks to finish before returning.
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                    log.debug("this has an Exception");
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
            return Result.fail("failed");
        } finally {
            if (jedis != null) jedis.close();
            ThreadPoolUtils.shutdownAndGCThreadPool(executor);
            if (pipe != null) pipe.close();
            // NOTE(review): unlike V5_2, sessionUtil is never closed here — verify whether
            // BlogMapperSqlSessionUtil holds a SqlSession that should be released.
        }
        return Result.ok();
    }

    // One batch of the V5 flush: reads the queued update-time GETs off the pipeline, selects
    // blogs changed since lastUpdateTime, pulls their like zsets, and hands the DB write to
    // the executor. futures carries the in-flight write tasks across batches.
    private void processV5(Long lastUpdateTime,
                           Pipeline pipe,
                           List<String> blogIds,
                           BlogMapperSqlSessionUtil sqlSessionUtil,
                           ThreadPoolExecutor executor,
                           ArrayList<Future<Boolean>> futures) {
        // Flush the GETs queued by the caller; each element is a blog's last like-update time.
        List<Long> updateTime = JSONUtil.toList(
                JSONUtil.toJsonStr(pipe.syncAndReturnAll()),
                Long.class);
        log.debug("updateTime is {}", updateTime);
        //->
        ArrayList<String> targetBlogIds = new ArrayList<>();// blogs whose like data must be flushed to the DB
        pipe.clear();// reset the pipeline before queueing the ZRANGE commands
        for (int index = 0; index < updateTime.size(); index++) {
            Long time = updateTime.get(index);
            // Skip blogs untouched since the last batch run.
            if (time <= lastUpdateTime) continue;
            String blogId = blogIds.get(index);
            pipe.zrangeWithScores(RedisContents.BLOG_LIKED_KEY + blogId, 0, -1);
            targetBlogIds.add(blogId);// this is a blogId
        }
        List<Blog> blogs = pipeLineResponseTransformedToList(pipe.syncAndReturnAll(), targetBlogIds);
        pipe.clear();
        if (blogs == null || blogs.size() == 0) return;
        // Back-pressure: once CPU_COUNT tasks are in flight, wait for them all before submitting more.
        if (futures.size() == SystemContents.CPU_COUNT) {
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            });
            futures.clear();
        }

        Future<Boolean> f = executor.submit(() -> {
            // Write the batch to the database on a worker thread.
            SqlSession session = sqlSessionUtil.getSqlSessionWithNoAutoCommit();
            BlogMapper blogMapper = sqlSessionUtil.getMapper();
            blogMapper.updateLikeListAndLikeNumberByBlogs(blogs);
            session.commit();// NOTE(review): no close/rollback on failure — confirm the util handles that
            return true;
        });
        futures.add(f);
        for (String targetBlogId : targetBlogIds) {// refresh each flushed blog's last-update timestamp
            pipe.set(RedisContents.BLOG_LIKES_KEY_UPDATE_TIME + targetBlogId, String.valueOf(System.currentTimeMillis()));
        }
        pipe.syncAndReturnAll();
    }


    /**
     * Batch-flushes changed blogs' Redis like lists to the database (V5_4 variant: uses a
     * {@link SqlSessionArray} so each worker thread gets its own SqlSession).
     *
     * @param prefix update-time key prefix, e.g. BLOG:LIKES:KEY:UPDATE:TIME:
     * @return Result.ok() on success, Result.fail("failed") on any exception
     */
    @Override//TODO: consider a MyBatis XML batch statement here
    public Result updateAllLikeListToDatabaseByPipelineV5_4(String prefix) {
        Jedis jedis = null;
        ThreadPoolExecutor executor = null;
        Pipeline pipe = null;
        SqlSessionArray sessionArray = null;
        try {
            jedis = jedisPool.getResource();
            pipe = jedis.pipelined();
            String cursor = "0";
            ArrayList<String> blogIds = new ArrayList<>(RedisContents.PIPELINE_EXECUTE_COUNT);
            ScanParams scanParams = new ScanParams();
            scanParams.match(prefix.concat("*"));// match keys starting with the given prefix
            scanParams.count(RedisContents.SCAN_BIGCOUNT_PIPELINE);
            // Timestamp of the previous batch run; blogs untouched since then are skipped.
            String lastBatchUpdateTime = stringRedisTemplate.opsForValue().get(RedisContents.BATCH_UPDATE_TIME);
            if (lastBatchUpdateTime == null) lastBatchUpdateTime = "0";
            Long lastUpdateTime = Long.valueOf(lastBatchUpdateTime);
            ArrayList<Future<Boolean>> futures = new ArrayList<>();
            executor = ThreadPoolUtils.getThreadPoolWithCallerRunsPolicy(SystemContents.CPU_COUNT, SystemContents.CPU_COUNT);
            sessionArray = new SqlSessionArray();
            do {
                // Scan one batch of keys, e.g. BLOG:LIKES:KEY:UPDATE:TIME:8997.
                ScanResult<String> result = jedis.scan(cursor, scanParams);
                cursor = result.getCursor();
                List<String> list = result.getResult();
                if (list == null || list.isEmpty()) break;
                for (String key : list) {
                    pipe.get(key);
                    blogIds.add(key.substring(prefix.length(), key.length()));
                }
                processV5_4(lastUpdateTime, pipe, blogIds, sessionArray, executor, futures);
                blogIds.clear();
            } while (!cursor.equals("0"));

            stringRedisTemplate.opsForValue().set(RedisContents.BATCH_UPDATE_TIME, String.valueOf(System.currentTimeMillis()));
            // Wait for all in-flight DB-update tasks to finish before returning.
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                    log.debug("this has an Exception");
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
            return Result.fail("failed");
        } finally {
            if (jedis != null) jedis.close();
            ThreadPoolUtils.shutdownAndGCThreadPool(executor);
            if (pipe != null) pipe.close();
            // Fix: sessionArray is null when getResource()/pipelined() throw before it is
            // assigned; the old `assert` either NPE'd (assertions disabled) or masked the
            // original exception with an AssertionError.
            if (sessionArray != null) sessionArray.helpGC();
        }
        return Result.ok();
    }

    // One batch of the V5_4 flush: like processV5, but each worker borrows a SqlSession
    // from the shared SqlSessionArray instead of a BlogMapperSqlSessionUtil.
    private void processV5_4(Long lastUpdateTime,
                             Pipeline pipe,
                             List<String> blogIds,
                             SqlSessionArray sessionArray,
                             ThreadPoolExecutor executor,
                             ArrayList<Future<Boolean>> futures) {
        // Flush the GETs queued by the caller; each element is a blog's last like-update time.
        List<Long> updateTime = JSONUtil.toList(
                JSONUtil.toJsonStr(pipe.syncAndReturnAll()),
                Long.class);
        log.debug("updateTime is {}", updateTime);
        //->
        ArrayList<String> targetBlogIds = new ArrayList<>();// blogs whose like data must be flushed to the DB
        pipe.clear();// reset the pipeline before queueing the ZRANGE commands
        for (int index = 0; index < updateTime.size(); index++) {
            Long time = updateTime.get(index);
            // Skip blogs untouched since the last batch run.
            if (time <= lastUpdateTime) continue;
            String blogId = blogIds.get(index);
            pipe.zrangeWithScores(RedisContents.BLOG_LIKED_KEY + blogId, 0, -1);
            targetBlogIds.add(blogId);// this is a blogId
        }
        List<Blog> blogs = pipeLineResponseTransformedToList(pipe.syncAndReturnAll(), targetBlogIds);
        pipe.clear();
        if (blogs == null || blogs.size() == 0) return;
        // Back-pressure: once CPU_COUNT tasks are in flight, wait for them all before submitting more.
        if (futures.size() == SystemContents.CPU_COUNT) {
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            });
            futures.clear();
        }

        Future<Boolean> f = executor.submit(() -> {// write the batch to the DB on a worker thread
            SingleSqlSession singleSqlSession = sessionArray.getSingleSqlSession();
            SqlSession sqlSession = singleSqlSession.getSqlSession();
            BlogMapper blogMapper = sqlSession.getMapper(BlogMapper.class);
            blogMapper.updateLikeListAndLikeNumberByBlogs(blogs);
            singleSqlSession.commit();
            return true;
        });

        // Why sessionArray's pool of SqlSessions is always sufficient: once
        // futures.size() == SystemContents.CPU_COUNT we block on f.get() above, and a
        // returned future means updateLikeListAndLikeNumberByBlogs has finished and the
        // session that worker used has been committed — so that session is free again
        // before the next task is submitted.
        futures.add(f);
        for (String targetBlogId : targetBlogIds) {// refresh each flushed blog's last-update timestamp
            pipe.set(RedisContents.BLOG_LIKES_KEY_UPDATE_TIME + targetBlogId, String.valueOf(System.currentTimeMillis()));
        }
        pipe.syncAndReturnAll();
    }

    /**
     * Batch-flushes changed blogs' Redis like lists to the database (V5_5 variant: the DB
     * write goes through batchUpdateLikeListAndLikeNumberWithTempTable).
     *
     * @param prefix update-time key prefix, e.g. BLOG:LIKES:KEY:UPDATE:TIME:
     * @return Result.ok() on success, Result.fail("failed") on any exception
     */
    @Override//TODO: consider a MyBatis XML batch statement here
    public Result updateAllLikeListToDatabaseByPipelineV5_5(String prefix) {
        Jedis jedis = null;
        ThreadPoolExecutor executor = null;
        Pipeline pipe = null;
        SqlSessionArray sessionArray = null;
        try {
            jedis = jedisPool.getResource();
            pipe = jedis.pipelined();
            String cursor = "0";
            ArrayList<String> blogIds = new ArrayList<>(RedisContents.PIPELINE_EXECUTE_COUNT);
            ScanParams scanParams = new ScanParams();
            scanParams.match(prefix.concat("*"));// match keys starting with the given prefix
            scanParams.count(RedisContents.SCAN_BIGCOUNT_PIPELINE);
            // Timestamp of the previous batch run; blogs untouched since then are skipped.
            String lastBatchUpdateTime = stringRedisTemplate.opsForValue().get(RedisContents.BATCH_UPDATE_TIME);
            if (lastBatchUpdateTime == null) lastBatchUpdateTime = "0";
            Long lastUpdateTime = Long.valueOf(lastBatchUpdateTime);
            ArrayList<Future<Boolean>> futures = new ArrayList<>();
            executor = ThreadPoolUtils.getThreadPoolWithCallerRunsPolicy(SystemContents.CPU_COUNT, SystemContents.CPU_COUNT);
            sessionArray = new SqlSessionArray();
            do {
                // Scan one batch of keys, e.g. BLOG:LIKES:KEY:UPDATE:TIME:8997.
                ScanResult<String> result = jedis.scan(cursor, scanParams);
                cursor = result.getCursor();
                List<String> list = result.getResult();
                if (list == null || list.isEmpty()) break;
                for (String key : list) {
                    pipe.get(key);
                    blogIds.add(key.substring(prefix.length(), key.length()));
                }
                processV5_5(lastUpdateTime, pipe, blogIds, sessionArray, executor, futures);
                blogIds.clear();
            } while (!cursor.equals("0"));

            stringRedisTemplate.opsForValue().set(RedisContents.BATCH_UPDATE_TIME, String.valueOf(System.currentTimeMillis()));
            // Wait for all in-flight DB-update tasks to finish before returning.
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                    log.debug("this has an Exception");
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
            return Result.fail("failed");
        } finally {
            if (jedis != null) jedis.close();
            ThreadPoolUtils.shutdownAndGCThreadPool(executor);
            if (pipe != null) pipe.close();
            // Fix: sessionArray is null when getResource()/pipelined() throw before it is
            // assigned; the old `assert` either NPE'd (assertions disabled) or masked the
            // original exception with an AssertionError.
            if (sessionArray != null) sessionArray.helpGC();
        }
        return Result.ok();
    }

    // One batch of the V5_5 flush: identical to processV5_4 except the mapper call uses the
    // temp-table batch update statement.
    private void processV5_5(Long lastUpdateTime,
                             Pipeline pipe,
                             List<String> blogIds,
                             SqlSessionArray sessionArray,
                             ThreadPoolExecutor executor,
                             ArrayList<Future<Boolean>> futures) {
        // Flush the GETs queued by the caller; each element is a blog's last like-update time.
        List<Long> updateTime = JSONUtil.toList(
                JSONUtil.toJsonStr(pipe.syncAndReturnAll()),
                Long.class);
        log.debug("updateTime is {}", updateTime);
        //->
        ArrayList<String> targetBlogIds = new ArrayList<>();// blogs whose like data must be flushed to the DB
        pipe.clear();// reset the pipeline before queueing the ZRANGE commands
        for (int index = 0; index < updateTime.size(); index++) {
            Long time = updateTime.get(index);
            // Skip blogs untouched since the last batch run.
            if (time <= lastUpdateTime) continue;
            String blogId = blogIds.get(index);
            pipe.zrangeWithScores(RedisContents.BLOG_LIKED_KEY + blogId, 0, -1);
            targetBlogIds.add(blogId);// this is a blogId
        }
        List<Blog> blogs = pipeLineResponseTransformedToList(pipe.syncAndReturnAll(), targetBlogIds);
        pipe.clear();
        if (blogs == null || blogs.size() == 0) return;
        // Back-pressure: once CPU_COUNT tasks are in flight, wait for them all before submitting more.
        if (futures.size() == SystemContents.CPU_COUNT) {
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            });
            futures.clear();
        }

        Future<Boolean> f = executor.submit(() -> {// write the batch to the DB on a worker thread
            SingleSqlSession singleSqlSession = sessionArray.getSingleSqlSession();
            SqlSession sqlSession = singleSqlSession.getSqlSession();
            BlogMapper blogMapper = sqlSession.getMapper(BlogMapper.class);
            blogMapper.batchUpdateLikeListAndLikeNumberWithTempTable(blogs);
            singleSqlSession.commit();
            return true;
        });

        // Why sessionArray's pool of SqlSessions is always sufficient: once
        // futures.size() == SystemContents.CPU_COUNT we block on f.get() above, and a
        // returned future means the mapper call has finished and the session that worker
        // used has been committed — so that session is free again before the next submit.
        futures.add(f);
        for (String targetBlogId : targetBlogIds) {// refresh each flushed blog's last-update timestamp
            pipe.set(RedisContents.BLOG_LIKES_KEY_UPDATE_TIME + targetBlogId, String.valueOf(System.currentTimeMillis()));
        }
        pipe.syncAndReturnAll();
    }


    /**
     * Batch-flushes changed blogs' Redis like lists to the database (V5_2 variant: workers
     * use an auto-commit SqlSession from BlogMapperSqlSessionUtil).
     *
     * @param prefix update-time key prefix, e.g. BLOG:LIKES:KEY:UPDATE:TIME:
     * @return Result.ok() on success, Result.fail("failed") on any exception
     */
    @Override//TODO: consider a MyBatis XML batch statement here
    public Result updateAllLikeListToDatabaseByPipelineV5_2(String prefix) {
        Jedis jedis = null;
        BlogMapperSqlSessionUtil sessionUtil = null;
        ThreadPoolExecutor executor = null;
        Pipeline pipe = null;
        try {
            jedis = jedisPool.getResource();
            pipe = jedis.pipelined();
            String cursor = "0";
            ArrayList<String> blogIds = new ArrayList<>(RedisContents.PIPELINE_EXECUTE_COUNT);
            ScanParams scanParams = new ScanParams();
            scanParams.match(prefix.concat("*"));// match keys starting with the given prefix
            scanParams.count(RedisContents.SCAN_BIGCOUNT_PIPELINE);
            // Timestamp of the previous batch run; blogs untouched since then are skipped.
            String lastBatchUpdateTime = stringRedisTemplate.opsForValue().get(RedisContents.BATCH_UPDATE_TIME);
            if (lastBatchUpdateTime == null) lastBatchUpdateTime = "0";
            Long lastUpdateTime = Long.valueOf(lastBatchUpdateTime);
            sessionUtil = new BlogMapperSqlSessionUtil();
            ArrayList<Future<Boolean>> futures = new ArrayList<>();
            executor = ThreadPoolUtils.getThreadPoolWithCallerRunsPolicy(SystemContents.CPU_COUNT, SystemContents.CPU_COUNT);

            do {
                // Scan one batch of keys, e.g. BLOG:LIKES:KEY:UPDATE:TIME:8997.
                ScanResult<String> result = jedis.scan(cursor, scanParams);
                cursor = result.getCursor();
                List<String> list = result.getResult();
                if (list == null || list.isEmpty()) break;
                for (String key : list) {
                    pipe.get(key);
                    blogIds.add(key.substring(prefix.length(), key.length()));
                }
                processV5_2(lastUpdateTime, pipe, blogIds, sessionUtil, executor, futures);
                blogIds.clear();
            } while (!cursor.equals("0"));

            stringRedisTemplate.opsForValue().set(RedisContents.BATCH_UPDATE_TIME, String.valueOf(System.currentTimeMillis()));
            // Wait for all in-flight DB-update tasks to finish before returning.
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
            return Result.fail("failed");
        } finally {
            if (jedis != null) jedis.close();
            ThreadPoolUtils.shutdownAndGCThreadPool(executor);
            if (pipe != null) pipe.close();
            // Fix: sessionUtil is null when an exception occurs before it is assigned; the
            // old `assert` either NPE'd (assertions disabled) or masked the original
            // exception with an AssertionError.
            if (sessionUtil != null) sessionUtil.closeSqlSession();
        }
        return Result.ok();
    }

    // One batch of the V5_2 flush: like processV5, but the worker uses an auto-commit
    // SqlSession, so there is no explicit commit after the mapper call.
    private void processV5_2(Long lastUpdateTime,
                             Pipeline pipe,
                             List<String> blogIds,
                             BlogMapperSqlSessionUtil sqlSessionUtil,
                             ThreadPoolExecutor executor,
                             ArrayList<Future<Boolean>> futures) {
        // Flush the GETs queued by the caller; each element is a blog's last like-update time.
        List<Long> updateTime = JSONUtil.toList(
                JSONUtil.toJsonStr(pipe.syncAndReturnAll()),
                Long.class);
        log.debug("updateTime is {}", updateTime);
        //->
        ArrayList<String> targetBlogIds = new ArrayList<>();// blogs whose like data must be flushed to the DB
        pipe.clear();// reset the pipeline before queueing the ZRANGE commands
        for (int index = 0; index < updateTime.size(); index++) {
            Long time = updateTime.get(index);
            // Skip blogs untouched since the last batch run.
            if (time <= lastUpdateTime) continue;
            String blogId = blogIds.get(index);
            pipe.zrangeWithScores(RedisContents.BLOG_LIKED_KEY + blogId, 0, -1);
            targetBlogIds.add(blogId);// this is a blogId
        }
        List<Blog> blogs = pipeLineResponseTransformedToList(pipe.syncAndReturnAll(), targetBlogIds);
        pipe.clear();
        if (blogs == null || blogs.size() == 0) return;
        // Back-pressure: once CPU_COUNT tasks are in flight, wait for them all before submitting more.
        if (futures.size() == SystemContents.CPU_COUNT) {
            futures.forEach(f -> {
                try {
                    f.get();
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            });
            futures.clear();
        }

        Future<Boolean> f = executor.submit(() -> {
            // Write the batch to the database on a worker thread (auto-commit session).
            // NOTE(review): 'session' is otherwise unused — presumably getMapperWithAutoCommit()
            // rides on the same auto-commit session; verify in BlogMapperSqlSessionUtil.
            SqlSession session = sqlSessionUtil.getSqlSessionWithAutoCommit();
            BlogMapper blogMapper = sqlSessionUtil.getMapperWithAutoCommit();
            blogMapper.updateLikeListAndLikeNumberByBlogs(blogs);
            return true;
        });
        futures.add(f);
        for (String targetBlogId : targetBlogIds) {// refresh each flushed blog's last-update timestamp
            pipe.set(RedisContents.BLOG_LIKES_KEY_UPDATE_TIME + targetBlogId, String.valueOf(System.currentTimeMillis()));
        }
        pipe.syncAndReturnAll();
    }

    /**
     * Converts raw pipeline ZRANGE-with-scores responses into Blog entities whose
     * likeList/likeNumber fields reflect the Redis state.
     *
     * @param response   raw pipeline results, one zset (list of {score, element}) per blog
     * @param blogKeyIds blog ids (as strings), positionally aligned with {@code response}
     * @return blogs ready for a batch update, or null when there is nothing to convert
     */
    private List<Blog> pipeLineResponseTransformedToList(List<Object> response, ArrayList<String> blogKeyIds) {
        if (blogKeyIds == null || blogKeyIds.isEmpty()) return null;
        if (response == null || response.isEmpty()) return null;
        log.debug("response is {}", response);

        // Round-trip through JSON to normalise Jedis tuple objects into plain maps.
        String jsonStr = JSONUtil.toJsonStr(response);
        log.debug("jsonStr is {}", jsonStr);//[[{"score":1.688218686914E12,"element":"1"},...]]
        List<List> lists = JSONUtil.toList(jsonStr, List.class);
        log.debug("lists is {}", lists);
        if (lists == null || lists.isEmpty()) return null;

        List<Blog> blogs = new ArrayList<>(blogKeyIds.size());
        for (int i = 0; i < lists.size(); i++) {
            List list = lists.get(i);
            log.debug("list is {}", list);//[{"score":1688218686914,"element":"1"}, ...]
            Map<String, String> likeListMap = new HashMap<>();
            Blog blog = new Blog();
            blog.setId(Long.valueOf(blogKeyIds.get(i)));
            for (Object o : list) {
                LikedIdTime likedIdTime = JSONUtil.toBean(JSONUtil.toJsonStr(o), LikedIdTime.class);
                log.debug("likedIdTime is {}", likedIdTime);
                if (likedIdTime == null) continue;
                likeListMap.put(likedIdTime.getElement(), likedIdTime.getScore());
            }
            // NOTE(review): Map.toString() ("{1=123}") is not strict JSON; kept as-is for
            // compatibility with the existing like_list column format.
            blog.setLikeList(likeListMap.toString());
            blog.setLikeNumber(likeListMap.size());
            blogs.add(blog);
        }
        return blogs;
    }


    /**
     * Updates the like_list and like_number columns for the given blog id, using the
     * {userId -> likeTime} map as the new like list.
     *
     * @param blogId         id of the blog row to update
     * @param userIdWithTime like list as {userId -> likeTime}; its size becomes like_number
     * @return true when the row was updated
     */
    private boolean updateLikeListByBlogId(Long blogId, Map<String, String> userIdWithTime) {
        // NOTE(review): Map.toString() produces "{1=123, 2=456}", which is not strict JSON,
        // yet getIdsInTableByBlogId parses like_list with JSONUtil — verify the round-trip.
        String stringIds = userIdWithTime.toString();

        // NOTE(review): SQL fragment built by string concatenation; safe only while keys and
        // values are numeric ids/timestamps produced by our own Redis writes.
        String updateSql = "like_list = " + "'" + stringIds + "' ," +
                "like_number = " + userIdWithTime.size();
        boolean isSuccess = update()
                .setSql(updateSql)
                .eq("id", blogId)
                .update();
        return isSuccess;
    }


    /**
     * Converts raw pipeline ZRANGE-with-scores responses into {blogId -> {userId -> likeTime}}.
     *
     * @param response   raw pipeline results, one zset (list of {score, element}) per blog
     * @param blogKeyIds blog ids (as strings), positionally aligned with {@code response}
     * @return per-blog like maps, or null when there is nothing to convert
     */
    private Map<Long, Map<String, String>> pipeLineResponseTransformedToMap2(List<Object> response, ArrayList<String> blogKeyIds) {
        if (blogKeyIds == null || blogKeyIds.isEmpty()) return null;
        if (response == null || response.isEmpty()) return null;
        log.debug("response is {}", response);

        // Round-trip through JSON to normalise Jedis tuple objects into plain maps.
        String jsonStr = JSONUtil.toJsonStr(response);
        log.debug("jsonStr is {}", jsonStr);//[[{"score":1.688218686914E12,"element":"1"},...]]
        List<List> lists = JSONUtil.toList(jsonStr, List.class);
        log.debug("lists is {}", lists);
        if (lists == null || lists.isEmpty()) return null;

        Map<Long, Map<String, String>> maps = new HashMap<>(blogKeyIds.size());
        for (int i = 0; i < lists.size(); i++) {
            List list = lists.get(i);
            log.debug("list is {}", list);//[{"score":1688218686914,"element":"1"}, ...]
            Long blogKeyId = Long.valueOf(blogKeyIds.get(i));
            Map<String, String> likeListMap = new HashMap<>();
            maps.put(blogKeyId, likeListMap);
            for (Object o : list) {
                LikedIdTime likedIdTime = JSONUtil.toBean(JSONUtil.toJsonStr(o), LikedIdTime.class);
                log.debug("likedIdTime is {}", likedIdTime);
                if (likedIdTime == null) continue;
                likeListMap.put(likedIdTime.getElement(), likedIdTime.getScore());
            }
        }
        return maps;
    }


    @Autowired
    private JedisPool jedisPool;

    // SCAN all Redis keys matching prefix* (StringRedisTemplate offers no direct way to
    // scan zset keys, so raw Jedis is used here).
    public Set<String> getAllRedisKeyByScanThroughMatch(String prefix) {
        Set<String> matchedKeys = new HashSet<>();
        ScanParams scanParams = new ScanParams().match(prefix.concat("*"));
        Jedis jedis = null;
        try {
            jedis = jedisPool.getResource();
            String cursor = "0";
            do {
                // Fetch one batch of keys and remember the cursor for the next round.
                ScanResult<String> result = jedis.scan(cursor, scanParams);
                cursor = result.getCursor();
                List<String> batch = result.getResult();
                if (batch == null || batch.isEmpty()) {
                    break;
                }
                for (String key : batch) {
                    log.debug("key is {}", key);// shows which keys were matched
                    matchedKeys.add(key);
                }
            } while (!cursor.equals("0"));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (jedis != null) jedis.close();
        }
        return matchedKeys;
    }


    /**
     * Tested manually: equivalent to `zrange BLOG_LIKED_KEY1 0 -1 withscores` per key.
     * For a collection of zset keys (BLOG_LIKED_KEY + blogId), fetches every like list in a
     * single pipeline and returns maps shaped like {1={1=1688218686914, 2=1688398456605}}.
     * The result feeds the batch like_list update methods above.
     *
     * @param collection zset keys to read, each of the form BLOG_LIKED_KEY&lt;blogId&gt;
     * @return {blogId -> {userId -> likeTime}}, or null when parsing fails
     */
    public Map<Long, Map<String, String>> getIdWithTimeListsByPipelineByJedis(Collection<String> collection) {

        String prefix = "BLOG_LIKED_KEY";
        Jedis jedis = null;
        Map<Long, Map<String, String>> maps = null;
        try {
            jedis = jedisPool.getResource();
            // try-with-resources: the pipeline was previously never closed.
            try (Pipeline pipe = jedis.pipelined()) {
                ArrayList<Long> blogKeyIds = new ArrayList<>(collection.size());
                for (String key : collection) {//TODO 1. cap the number of commands per pipeline; 2. could also send pipeline commands from several threads
                    pipe.zrangeWithScores(key, 0, -1);
                    blogKeyIds.add(Long.parseLong(key.substring(prefix.length())));
                }
                List<Object> response = pipe.syncAndReturnAll();
                maps = pipeLineResponseTransformedToMap(response, blogKeyIds);
            }
        } catch (NumberFormatException e) {
            e.printStackTrace();
        } finally {
            // Fix: getResource() may have thrown, leaving jedis null — the old unconditional
            // jedis.close() would then NPE and mask the original exception.
            if (jedis != null) jedis.close();
        }
        log.debug("maps is {}", maps);
        return maps;
    }

    /**
     * Converts raw pipeline ZRANGE-with-scores responses into {blogId -> {userId -> likeTime}}.
     * Same as pipeLineResponseTransformedToMap2, but the blog ids arrive already parsed as Longs.
     *
     * @param response   raw pipeline results, one zset (list of {score, element}) per blog
     * @param blogKeyIds blog ids, positionally aligned with {@code response}
     * @return per-blog like maps, or null when there is nothing to convert
     */
    private Map<Long, Map<String, String>> pipeLineResponseTransformedToMap(List<Object> response, ArrayList<Long> blogKeyIds) {
        if (blogKeyIds == null || blogKeyIds.isEmpty()) return null;
        if (response == null || response.isEmpty()) return null;
        log.debug("response is {}", response);

        // Round-trip through JSON to normalise Jedis tuple objects into plain maps.
        String jsonStr = JSONUtil.toJsonStr(response);
        log.debug("jsonStr is {}", jsonStr);//[[{"score":1.688218686914E12,"element":"1"},...]]
        List<List> lists = JSONUtil.toList(jsonStr, List.class);
        log.debug("lists is {}", lists);
        if (lists == null || lists.isEmpty()) return null;

        Map<Long, Map<String, String>> maps = new HashMap<>(blogKeyIds.size());
        for (int i = 0; i < lists.size(); i++) {
            List list = lists.get(i);
            log.debug("list is {}", list);//[{"score":1688218686914,"element":"1"}, ...]
            Long blogKeyId = blogKeyIds.get(i);
            Map<String, String> likeListMap = new HashMap<>();
            maps.put(blogKeyId, likeListMap);
            for (Object o : list) {
                LikedIdTime likedIdTime = JSONUtil.toBean(JSONUtil.toJsonStr(o), LikedIdTime.class);
                log.debug("likedIdTime is {}", likedIdTime);
                if (likedIdTime == null) continue;
                likeListMap.put(likedIdTime.getElement(), likedIdTime.getScore());
            }
        }
        return maps;
    }


}
