package com.wei.czz.framework.common.service.impl;

import com.wei.czz.common.constant.EntityConstant;
import com.wei.czz.common.constant.RedisConstant;
import com.wei.czz.common.utils.CacheKeyUtils;
import com.wei.czz.framework.blog.entity.BlogTagEntity;
import com.wei.czz.framework.blog.service.*;
import com.wei.czz.framework.common.handler.redis.RedisHashHandler;
import com.wei.czz.framework.common.handler.redis.RedisZSetHandler;
import com.wei.czz.framework.blog.entity.BlogEntity;
import com.wei.czz.framework.blog.entity.SpaceEntity;
import com.wei.czz.framework.common.service.InitService;
import com.wei.czz.framework.admin.entity.MenuEntity;
import com.wei.czz.framework.admin.entity.UserEntity;
import com.wei.czz.framework.admin.service.MenuService;
import com.wei.czz.framework.admin.service.RoleService;
import com.wei.czz.framework.admin.service.UserService;
import com.wei.czz.framework.common.helper.LimitHelper;
import lombok.AllArgsConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Created by IntelliJ IDEA.
 *
 * @author wei
 * date: 2021-06-23 22:30:00
 * className: InitServiceImpl — implementation of the application cache initialization service
 * version: 1.0
 * description:
 */
@Service("initService")
@AllArgsConstructor
public class InitServiceImpl implements InitService {

    private static final Logger log = LoggerFactory.getLogger(InitServiceImpl.class);

    private final MenuService menuService;

    // NOTE(review): roleService and tagService are injected but never referenced in
    // this class. They are kept so the @AllArgsConstructor-generated constructor
    // signature (and therefore Spring wiring) stays unchanged — confirm whether they
    // can be removed safely.
    private final RoleService roleService;

    private final UserService userService;

    private final TagService tagService;

    private final SpaceService spaceService;

    private final BlogService blogService;

    private final BlogIssueService blogIssueService;

    private final BlogTagService blogTagService;

    private final LimitHelper limitHelper;

    private final RedisHashHandler redisHashHandler;

    private final RedisZSetHandler redisZSetHandler;

    /**
     * Loads every system menu from the database and caches the full set in a
     * single Redis hash keyed by menu id.
     */
    @Override
    public void initMenuCache() {
        log.info("开始初始化系统菜单缓存业务");

        // Passing null means "no filter": fetch every menu row.
        List<MenuEntity> menuList = menuService.getMenuList(null);
        // Guard against null/empty so we never call putAll with an empty map.
        if (CollectionUtils.isEmpty(menuList)) {
            log.info("系统菜单为空，不初始化系统菜单缓存");
            return;
        }

        // Redis hash fields are strings, so the menu id is stringified.
        Map<String, MenuEntity> menuMap = toIdMap(menuList, MenuEntity::getMenuId);

        redisHashHandler.putAll(CacheKeyUtils.getRedisHashKey(EntityConstant.MENU), menuMap);
        log.info("初始化系统菜单缓存业务结束");
    }

    /**
     * Loads every system user from the database and caches the full set in a
     * single Redis hash keyed by user id.
     */
    @Override
    public void initUserCache() {
        log.info("开始初始化系统用户缓存业务");
        List<UserEntity> userList = userService.findUserList();
        // Guard against null/empty so we never call putAll with an empty map.
        if (CollectionUtils.isEmpty(userList)) {
            log.info("系统用户为空，不初始化系统用户缓存");
            return;
        }

        // Redis hash fields are strings, so the user id is stringified.
        Map<String, UserEntity> userMap = toIdMap(userList, UserEntity::getUserId);

        redisHashHandler.putAll(CacheKeyUtils.getRedisHashKey(EntityConstant.USER), userMap);
        log.info("初始化系统用户缓存业务结束");
    }

    /**
     * Caches every user's personal spaces: one Redis hash per user
     * ({@code "<userId>:<SPACE_MAP>"}) mapping space id to space entity.
     */
    @Override
    public void initUserSpaceCache() {
        log.info("开始初始化系统用户个人空间缓存业务");
        // Passing null means "no filter": fetch every space row.
        List<SpaceEntity> spaceList = spaceService.getSpaceList(null);
        if (CollectionUtils.isEmpty(spaceList)) {
            log.info("用户个人空间为空，不初始化系统用户个人空间缓存");
            return;
        }

        // Group spaces by owning user, mapping each user's group to (spaceId -> space)
        // in a single pipeline instead of a groupingBy followed by a manual loop.
        Map<Long, Map<String, SpaceEntity>> userSpaceMap = spaceList.stream().collect(
                Collectors.groupingBy(
                        SpaceEntity::getUserId,
                        Collectors.toMap(space -> String.valueOf(space.getSpaceId()), Function.identity())
                )
        );

        // Write one hash per user under "<userId>:<SPACE_MAP>".
        userSpaceMap.forEach((userId, spaces) -> {
            String spaceKey = userId + RedisConstant.SPLIT + RedisConstant.SPACE_MAP;
            redisHashHandler.putAll(spaceKey, spaces);
        });
        log.info("初始化系统用户个人空间缓存业务结束");
    }

    /**
     * Loads blog base records and caches them in a single Redis hash keyed by
     * blog id. Does nothing when the blog table is empty.
     */
    @Override
    public void initBlogCache() {
        log.info("开始初始化系统博客基本信息缓存业务");
        List<BlogEntity> blogList = blogService.getBlogList();
        // CollectionUtils.isEmpty also covers a null return, which the previous
        // !blogList.isEmpty() check would have NPE'd on.
        if (!CollectionUtils.isEmpty(blogList)) {
            Map<String, BlogEntity> blogMap = toIdMap(blogList, BlogEntity::getId);
            redisHashHandler.putAll(CacheKeyUtils.getRedisHashKey(EntityConstant.BLOG), blogMap);
        }
        log.info("初始化系统博客基本信息缓存业务结束");
    }

    /**
     * Rebuilds the published-blog tag ranking: for each tag name, the number of
     * published blogs carrying that tag becomes its score in the tag zset.
     * Leaves the existing cache untouched when there are no published blogs or
     * no tag relations.
     */
    @Override
    public void initBlogTagRelationCache() {
        log.info("开始初始化系统已发布博客关联的标签数量排行缓存业务");
        List<Long> blogIdList = blogIssueService.getIssueBlogIdList();
        if (CollectionUtils.isEmpty(blogIdList)) {
            log.info("已发布博客为空，不初始化系统已发布博客关联的标签数量排行缓存");
            return;
        }
        // CollectionUtils.isEmpty also covers a null return, which the previous
        // blogTagList.isEmpty() check would have NPE'd on.
        List<BlogTagEntity> blogTagList = blogTagService.findList(blogIdList);
        if (CollectionUtils.isEmpty(blogTagList)) {
            log.info("已发布博客关联标签为空，不初始化系统已发布博客关联的标签数量排行缓存");
            return;
        }

        // NOTE(review): the rank range (-1, Integer.MAX_VALUE) looks suspicious —
        // Redis's conventional "remove everything" rank range for ZREMRANGEBYRANK is
        // (0, -1). Confirm RedisZSetHandler#removeRange semantics before changing.
        redisZSetHandler.removeRange(RedisConstant.TAG_ZSET, -1, Integer.MAX_VALUE);

        // Count occurrences per tag name; the count (as a double) is the zset score.
        // summingDouble is the idiomatic spelling of reducing(0.0, v -> 1.0, Double::sum).
        Map<String, Double> tagMap = blogTagList.stream().collect(Collectors.groupingBy(
                BlogTagEntity::getTagName,
                Collectors.summingDouble(tag -> 1.0))
        );
        redisZSetHandler.add(RedisConstant.TAG_ZSET, tagMap);

        log.info("初始化系统已发布博客关联的标签数量排行缓存业务结束");
    }

    /**
     * Delegates rate-limit initialization to the limit helper.
     */
    @Override
    public void initLimit() {
        limitHelper.init();
    }

    /**
     * Indexes an entity list by the string form of each entity's id, the shape
     * required for storage as a Redis hash (hash fields are strings).
     *
     * @param entities    non-null list of entities; ids are assumed unique
     *                    (database primary keys), otherwise toMap throws
     * @param idExtractor extracts the id used as the hash field
     * @return map of stringified id to entity
     */
    private static <T> Map<String, T> toIdMap(List<T> entities, Function<T, ?> idExtractor) {
        return entities.stream().collect(
                Collectors.toMap(entity -> String.valueOf(idExtractor.apply(entity)), Function.identity())
        );
    }
}
