package com.xiaofu.live.user.provider.service.impl;

import com.alibaba.fastjson2.JSON;
import com.alibaba.nacos.shaded.com.google.common.collect.Maps;
import com.xiaofu.live.common.interfaces.ConvertBeanUtils;
import com.xiaofu.live.framework.redis.starter.key.UserProviderCacheKeyBuilder;
import com.xiaofu.live.user.dto.CacheAsyncDeleteCode;
import com.xiaofu.live.user.dto.UserCacheAsyncDeleteDTO;
import com.xiaofu.live.user.dto.UserDTO;
import com.xiaofu.live.user.provider.config.RocketMQConsumerConfig;
import com.xiaofu.live.user.provider.dao.mapper.UserMapper;
import com.xiaofu.live.user.provider.dao.po.UserPO;
import com.xiaofu.live.user.provider.service.IUserService;
import groovy.util.logging.Slf4j;
import jakarta.annotation.Resource;
import org.apache.rocketmq.client.producer.MQProducer;
import org.apache.rocketmq.common.message.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.core.RedisOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.SessionCallback;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;

@Service
public class UserServiceImpl implements IUserService {

    // @Slf4j removed: the annotation came from groovy.util.logging and is a Groovy-only
    // AST transform — under javac it does nothing; this explicit SLF4J logger is the real one.
    private static final Logger log = LoggerFactory.getLogger(UserServiceImpl.class);

    // NOTE(review): ids <= 10000 are filtered out of batch queries — presumably reserved /
    // system account ids; confirm the threshold against the id-generation scheme.
    private static final long MIN_VALID_USER_ID = 10000L;

    @Autowired
    private UserMapper userMapper;

    @Resource
    private RedisTemplate<String, UserDTO> redisTemplate;

    @Autowired
    private UserProviderCacheKeyBuilder userProviderCacheKeyBuilder;

    @Resource
    private MQProducer mqProducer;

    // NOTE(review): not referenced directly (the topic below is read via the static constant);
    // kept so a missing consumer config still fails fast at context startup.
    @Autowired
    private RocketMQConsumerConfig rocketMQConsumerConfig;

    /**
     * Reads a user through the cache (cache-aside): a Redis hit wins, otherwise the DB row
     * is loaded, converted and cached for 30 minutes.
     *
     * @param userId primary key; {@code null} yields {@code null}
     * @return the user, or {@code null} when the id is null or no row exists
     */
    @Override
    public UserDTO getByUserId(Long userId) {
        if (userId == null) {
            return null;
        }
        String key = userProviderCacheKeyBuilder.buildUserInfoKey(userId);
        UserDTO userDTO = redisTemplate.opsForValue().get(key);
        if (userDTO != null) {
            return userDTO;
        }
        userDTO = ConvertBeanUtils.convert(userMapper.selectById(userId), UserDTO.class);
        if (userDTO != null) {
            redisTemplate.opsForValue().set(key, userDTO, 30, TimeUnit.MINUTES);
        }
        // NOTE(review): misses are not cached, so repeated lookups of absent ids always hit
        // the DB (cache penetration) — consider a short-TTL null sentinel if that matters.
        return userDTO;
    }

    /**
     * Updates a user row and invalidates its cache entry via delayed double delete:
     * delete immediately after the DB write, then ask the cache-delete consumer (through a
     * delayed RocketMQ message) to delete again, covering the window in which a concurrent
     * reader could re-populate the cache with stale data.
     *
     * @param userDTO user to persist; must carry a non-null userId
     * @return {@code true} when the update was dispatched, {@code false} for invalid input
     */
    @Override
    public boolean updateUserInfo(UserDTO userDTO) {
        if (userDTO == null || userDTO.getUserId() == null) {
            return false;
        }
        userMapper.updateById(ConvertBeanUtils.convert(userDTO, UserPO.class));

        // First delete: drop the now-stale cache entry right after the DB write.
        String key = userProviderCacheKeyBuilder.buildUserInfoKey(userDTO.getUserId());
        redisTemplate.delete(key);
        log.info("第一次业务删除userDTO: {}", userDTO);

        try {
            // Second delete: delayed MQ message handled by the cache-delete consumer.
            UserCacheAsyncDeleteDTO userCacheAsyncDeleteDTO = new UserCacheAsyncDeleteDTO();
            userCacheAsyncDeleteDTO.setCode(CacheAsyncDeleteCode.USER_INFO_DELETE.getCode());
            Map<String, Object> jsonParam = new HashMap<>();
            jsonParam.put("userId", userDTO.getUserId());
            userCacheAsyncDeleteDTO.setJson(JSON.toJSONString(jsonParam));

            Message message = new Message();
            message.setTopic(RocketMQConsumerConfig.CACHE_ASYNC_DELETE);
            // Explicit charset: the previous bare getBytes() used the platform default and
            // could corrupt the payload on non-UTF-8 hosts.
            message.setBody(JSON.toJSONString(userCacheAsyncDeleteDTO).getBytes(StandardCharsets.UTF_8));
            // RocketMQ delay level 2 = 5 seconds (level 1 would be 1 second). The original
            // comment claimed "1s"; the level is kept, only the documentation is corrected.
            message.setDelayTimeLevel(2);
            mqProducer.send(message);
        } catch (Exception e) {
            // Best effort: a lost second delete only lengthens the stale-cache window.
            // (Previously e.getMessage() was passed as the SLF4J format string — it may be
            // null or contain '{}' placeholders.)
            log.error("delayed cache-delete message failed, userId={}", userDTO.getUserId(), e);
        }
        return true;
    }

    /**
     * Persists a new user. The cache is intentionally not written here; the entry is
     * lazily loaded on the first read.
     *
     * @param userDTO user to insert; must carry a non-null userId
     * @return the inserted user converted back from the persisted PO, or {@code null}
     *         for invalid input
     */
    @Override
    public UserDTO insertUserInfo(UserDTO userDTO) {
        if (userDTO == null || userDTO.getUserId() == null) {
            return null;
        }
        UserPO userPO = ConvertBeanUtils.convert(userDTO, UserPO.class);
        userMapper.insert(userPO);
        return ConvertBeanUtils.convert(userPO, UserDTO.class);
    }

    /**
     * Batch-loads users: bulk cache lookup first, then the misses from the DB (sharded and
     * queried in parallel instead of a UNION ALL), writing the DB results back to the cache
     * with jittered TTLs.
     *
     * @param userIdList ids to load; null/invalid/duplicate ids are dropped
     * @return userId -> user for every id that exists; empty map for no valid ids
     */
    @Override
    public Map<Long, UserDTO> batchQueryUserInfo(List<Long> userIdList) {
        if (CollectionUtils.isEmpty(userIdList)) {
            return Maps.newHashMap();
        }
        // Drop null/invalid ids and duplicates. Nulls previously NPE'd on unboxing, and
        // duplicates made the Collectors.toMap calls below throw IllegalStateException.
        userIdList = userIdList.stream()
                .filter(id -> id != null && id > MIN_VALID_USER_ID)
                .distinct()
                .collect(Collectors.toList());
        if (CollectionUtils.isEmpty(userIdList)) {
            return Maps.newHashMap();
        }

        // 1) Bulk cache lookup.
        List<String> keyList = new ArrayList<>(userIdList.size());
        userIdList.forEach(userId -> keyList.add(userProviderCacheKeyBuilder.buildUserInfoKey(userId)));
        List<UserDTO> cacheHits = redisTemplate.opsForValue().multiGet(keyList);
        // multiGet pads misses with nulls and can itself return null (pipeline/tx context).
        List<UserDTO> userDTOList = cacheHits == null
                ? new ArrayList<>()
                : cacheHits.stream().filter(Objects::nonNull).collect(Collectors.toCollection(ArrayList::new));

        // All ids served from cache — done.
        if (!CollectionUtils.isEmpty(userDTOList) && userDTOList.size() == userIdList.size()) {
            return userDTOList.stream()
                    .collect(Collectors.toMap(UserDTO::getUserId, Function.identity(), (a, b) -> a));
        }

        // 2) Query the misses from the DB. Set membership instead of List.contains (O(n^2) before).
        Set<Long> cachedIds = userDTOList.stream().map(UserDTO::getUserId).collect(Collectors.toSet());
        List<Long> userIdNotInCacheList = userIdList.stream()
                .filter(id -> !cachedIds.contains(id))
                .collect(Collectors.toList());

        // Shard by userId % 100 and query shards in parallel (replaces a UNION ALL).
        // NOTE(review): parallelStream runs blocking JDBC calls on the shared ForkJoin common
        // pool — consider a dedicated executor if shard count or latency grows.
        Map<Long, List<Long>> userIdMap = userIdNotInCacheList.stream()
                .collect(Collectors.groupingBy(userId -> userId % 100));
        List<UserDTO> dbQueryResult = new CopyOnWriteArrayList<>();
        userIdMap.values().parallelStream().forEach(queryUserIdList ->
                dbQueryResult.addAll(ConvertBeanUtils.convertList(userMapper.selectBatchIds(queryUserIdList), UserDTO.class)));

        if (!CollectionUtils.isEmpty(dbQueryResult)) {
            Map<String, UserDTO> saveCacheMap = dbQueryResult.stream()
                    .collect(Collectors.toMap(
                            userDTO -> userProviderCacheKeyBuilder.buildUserInfoKey(userDTO.getUserId()),
                            Function.identity(),
                            (a, b) -> a));
            redisTemplate.opsForValue().multiSet(saveCacheMap);
            // multiSet cannot attach TTLs, so expirations are applied afterwards in a single
            // pipelined round-trip (one network hop for all keys); TTLs are jittered to avoid
            // a synchronized expiry stampede.
            redisTemplate.executePipelined(new SessionCallback<Object>() {

                @Override
                @SuppressWarnings("unchecked") // pipelined key cast required by SessionCallback's generics
                public <K, V> Object execute(RedisOperations<K, V> operations) throws DataAccessException {
                    for (String redisKey : saveCacheMap.keySet()) {
                        operations.expire((K) redisKey, createRandomExpireTime(), TimeUnit.SECONDS);
                    }
                    return null;
                }
            });
            userDTOList.addAll(dbQueryResult);
        }

        return userDTOList.stream()
                .collect(Collectors.toMap(UserDTO::getUserId, Function.identity(), (a, b) -> a));
    }

    /**
     * TTL in seconds: a 30-minute base plus 0–999 s of random jitter, so cache entries
     * written together do not all expire at the same instant.
     */
    private int createRandomExpireTime() {
        int jitter = ThreadLocalRandom.current().nextInt(1000);
        return 30 * 60 + jitter;
    }

}
