package com.nami.conver.infra.middleware.mq;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.date.TimeInterval;
import com.nami.conver.domain.pchat.model.ConverModel;
import com.nami.conver.domain.repository.CacheConverRepository;
import com.nami.conver.infra.cache.conver.ConverCache;
import com.nami.conver.types.util.L2ConverCacheKeyUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.dromara.common.core.utils.StringUtils;
import org.dromara.common.json.utils.JsonUtils;
import org.dromara.common.redis.utils.RedisTemplateKit;
import org.dromara.common.redis.utils.RedisUtils;
import org.redisson.api.RedissonClient;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.stereotype.Component;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static com.nami.conver.types.constants.CommonConst.BASE_NUM_1;

@Slf4j
@Component
@RequiredArgsConstructor
public class CacheConverRepositoryImpl implements CacheConverRepository {

    /** Threshold (ms) above which a cache operation is logged as slow. */
    private static final long SLOW_OP_MS = 200;

    private final RedisTemplateKit redisTemplateKit;
    private final RedissonClient redissonClient;

    /**
     * Writes a single conversation into the cache: one hash field for the item body
     * plus one ZSet member used for time-ordered pagination.
     *
     * @param converDomain conversation to cache (appKey/userId/targetId identify it)
     * @return always {@code true}
     */
    @Override
    public boolean writeConver(ConverModel converDomain) {
        TimeInterval timer = DateUtil.timer();
        log.info("[cache] 刷新缓存 request={}", JsonUtils.toJsonString(converDomain));
        // Hash: one field per (userId, targetId) conversation item.
        redisTemplateKit.hset(L2ConverCacheKeyUtil.buildConverInfoKey(converDomain.getAppKey(), converDomain.getUserId()),
            L2ConverCacheKeyUtil.buildConverItemKey(converDomain.getUserId(), converDomain.getTargetId()), converDomain);

        // ZSet keyed by (appKey, userId), member = targetId, score = sortTime; used for pagination.
        // NOTE: RedisTemplate's zAdd threw "Thread limit exceeded replacing blocked worker"
        // under heavy load even with a larger thread pool, so Redisson's zAdd is used instead.
        RedisUtils.zAdd(L2ConverCacheKeyUtil.buildConverZAddSortKey(converDomain.getAppKey(),
            converDomain.getUserId()), converDomain.getTargetId(), converDomain.getSortTime());
        if (timer.interval() > SLOW_OP_MS) {
            log.warn("[cache] 刷新缓存出现耗时高， userId={}. targetId={} use={}ms", converDomain.getUserId(),
                converDomain.getTargetId(), timer.interval());
        }
        return Boolean.TRUE;
    }

    /**
     * Batch-writes conversations via a single Redis pipeline (ZADD per item + HMSET per bucket).
     *
     * @param items conversations to cache; safely ignored when empty
     * @return always {@code true}
     */
    @Override
    public boolean writeAll(List<ConverModel> items) {
        // Guard: items.get(0) below would NPE on an empty batch.
        if (CollUtil.isEmpty(items)) {
            return Boolean.TRUE;
        }
        TimeInterval timer = DateUtil.timer();
        log.info("[cache] 存入数量：{}", items.size());
        String appKey = items.get(0).getAppKey();

        // Group hash fields per bucket. Grouping MUST be done on the String key:
        // byte[] uses identity equals/hashCode, so a Map keyed by byte[] never finds an
        // existing entry (every getBytes() allocates a fresh array). The previous
        // implementation also dropped the freshly created value map without putting it
        // into the outer map, so nothing was ever written.
        Map<String, Map<byte[], byte[]>> grouped = new HashMap<>();
        for (ConverModel item : items) {
            String bucket = L2ConverCacheKeyUtil.infoBucket(item.getUserId());
            grouped.computeIfAbsent(bucket, k -> new HashMap<>())
                .put(L2ConverCacheKeyUtil.buildConverItemKey(item.getUserId(), item.getTargetId())
                    .getBytes(StandardCharsets.UTF_8), JsonUtils.toJsonByte(item));
        }
        // Convert to the byte[]-keyed shape expected by the pipeline helper.
        Map<byte[], Map<byte[], byte[]>> addMap = new HashMap<>(grouped.size());
        grouped.forEach((bucket, fields) -> addMap.put(bucket.getBytes(StandardCharsets.UTF_8), fields));

        // Pipelined write: a plain per-item loop took 1-2s for large batches.
        pipelineUpdateOrInsertZSetMembers(items, addMap);

        if (timer.interval() > SLOW_OP_MS) {
            log.warn("[cache] 保存会话出问题了。 appKey={} size={} use:{}ms", appKey, items.size(), timer.interval());
        }
        return Boolean.TRUE;
    }

    /**
     * Pipelines ZADD (sort index) and HMSET (item bodies) commands for a batch of conversations.
     *
     * @param items  conversations whose sort entries are ZADDed; no-op when empty
     * @param addMap hash bucket key (UTF-8 bytes) -> field/value byte map to HMSET
     */
    public void pipelineUpdateOrInsertZSetMembers(List<ConverModel> items, Map<byte[], Map<byte[], byte[]>> addMap) {
        if (CollUtil.isEmpty(items)) {
            return;
        }

        redisTemplateKit.executePipelined((RedisCallback<Object>) connection -> {
            for (ConverModel item : items) {
                String key = L2ConverCacheKeyUtil.buildConverZAddSortKey(item.getAppKey(), item.getUserId());
                // Raw bytes avoid serializer overhead inside the pipeline.
                connection.zAdd(key.getBytes(), item.getSortTime(), item.getTargetId().getBytes());
            }
            for (Map.Entry<byte[], Map<byte[], byte[]>> entry : addMap.entrySet()) {
                connection.hashCommands().hMSet(entry.getKey(), entry.getValue());
            }

            return null;
        });
    }

    /**
     * Fetches a single cached conversation.
     *
     * @return the cached item, or {@code null} when any identifier is blank or the entry is absent
     */
    @Override
    public ConverModel get(String appKey, String userId, String targetId) {
        if (StringUtils.isBlank(appKey) || StringUtils.isBlank(userId) || StringUtils.isBlank(targetId)) {
            return null;
        }
        return (ConverModel) redisTemplateKit.hget(L2ConverCacheKeyUtil.buildConverInfoKey(appKey, userId),
            L2ConverCacheKeyUtil.buildConverItemKey(userId, targetId));
    }

    /**
     * Group-view batch read: for each member key, pipelines an HGET of that member's
     * conversation with {@code targetId}.
     *
     * @param appKey   application key; required
     * @param keys     member user ids to look up; required
     * @param targetId conversation counterpart id
     * @return non-null results, or {@code null} when input is invalid or nothing was found
     */
    @Override
    public List<ConverModel> getConversByGrpView(String appKey, List<String> keys, String targetId) {
        TimeInterval timer = DateUtil.timer();
        if (StringUtils.isBlank(appKey) || CollUtil.isEmpty(keys)) {
            return null;
        }

        // Only the commands are issued here; executePipelined deserializes the replies.
        // NOTE(review): assumes the template's hash value serializer yields ConverModel
        // for these raw-byte HGETs — verify against RedisTemplateKit's configuration.
        List<ConverModel> converItems = redisTemplateKit.executePipelined((RedisCallback<Object>) connection -> {
            for (String key : keys) {
                byte[] infoKey = L2ConverCacheKeyUtil.buildConverInfoKey(appKey, key).getBytes(StandardCharsets.UTF_8);
                byte[] infoItem = L2ConverCacheKeyUtil.buildConverItemKey(key, targetId).getBytes(StandardCharsets.UTF_8);
                connection.hashCommands().hGet(infoKey, infoItem);
            }
            return null;
        });

        if (CollUtil.isEmpty(converItems)) {
            return null;
        }
        // Pipeline replies contain null for every missing hash field; drop them.
        List<ConverModel> filters = converItems.stream().filter(item -> item != null).collect(Collectors.toList());
        if (timer.interval() > SLOW_OP_MS) {
            log.warn("[cache] 以群为维度，批量查询群成员出现 size={} use={}ms", converItems.size(), timer.interval());
        }
        return filters;
    }

    /**
     * Multi-gets conversation items from one user's hash bucket.
     *
     * @return non-null hits, or {@code null} when input is invalid or nothing was found
     */
    @Override
    public List<ConverModel> getConverFromKeys(String appKey, String userId, List<String> keys) {
        if (StringUtils.isBlank(appKey) || CollUtil.isEmpty(keys)) {
            return null;
        }
        List<ConverModel> list = redisTemplateKit.hMultiGetAll(L2ConverCacheKeyUtil.buildConverInfoKey(appKey, userId), keys);
        if (CollUtil.isEmpty(list)) {
            return null;
        }
        // hMultiGet returns null slots for missing fields; keep only real hits.
        return list.stream().filter(item -> item != null).collect(Collectors.toList());
    }

    /** Plain string-key SET passthrough. */
    @Override
    public void set(String key, Object value) {
        redisTemplateKit.set(key, value);
    }

    /**
     * Returns {@code true} only when the key holds the sentinel value {@code BASE_NUM_1}.
     * NOTE(review): the cast assumes the value was stored as an Integer — a different
     * stored type would throw ClassCastException; confirm against the writers of this key.
     */
    @Override
    public boolean isExistByKey(String key) {
        Integer result = (Integer) redisTemplateKit.get(key);
        if (result == null || result != BASE_NUM_1) {
            return Boolean.FALSE;
        }
        return Boolean.TRUE;
    }

    /** Reads from the in-process L1 cache (not Redis). */
    @Override
    public ConverModel get(String key) {
        return ConverCache.get(key);
    }

    /** Upserts into the in-process L1 cache (not Redis). */
    @Override
    public void insertOrUpd(String key, ConverModel item) {
        ConverCache.insertOrUpd(key, item);
    }


}
