package com.zhg.shortlink.service;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zhg.shortlink.domain.entity.ShortUrlMapping;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.redisson.api.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * 集群感知缓存服务
 * 使用Redis集群分片和Hash Tag策略优化数据分布
 * @author 朱洪刚
 * @version 1.0
 * @data 2025/10/21 17:13
 */
@Slf4j
@RequiredArgsConstructor
@Service
public class ClusterAwareCacheService {

    private static final String CACHE_PREFIX = "shortlink:";
    private static final String URL_CACHE_KEY = CACHE_PREFIX + "url:";
    private static final String COUNT_CACHE_KEY = CACHE_PREFIX + "count:";
    private static final String HASH_MAPPING_KEY = CACHE_PREFIX + "hash:";

    private static final Duration DEFAULT_EXPIRE_TIME = Duration.ofHours(1);
    private static final Duration HOT_DATA_EXPIRE_TIME = Duration.ofHours(24);
    private static final Duration COUNT_EXPIRE_TIME = Duration.ofDays(7);

    private final RedissonClient redissonClient;
    private final ObjectMapper objectMapper;
    private final LocalCacheService localCacheService;
    private final TieredBloomFilterService tieredBloomFilterService;
    private final ShardingStrategyService shardingStrategyService;

    @Value("${shortlink.cluster.batch-size:50}")
    private int batchSize;

    @Value("${shortlink.cluster.enable-hash-tag:true}")
    private boolean enableHashTag;

    /**
     * 从集群缓存获取短链信息（支持Hash Tag）
     */
    public ShortUrlMapping getFromCache(String shortCode) {
        if (!StringUtils.hasText(shortCode)) {
            log.warn("shortCode为空，无法获取缓存");
            return null;
        }

        // 先从本地缓存获取
        ShortUrlMapping shortUrlMapping = localCacheService.getFromLocalCache(shortCode);
        if (shortUrlMapping != null) {
            log.debug("本地缓存命中: {}", shortCode);
            return shortUrlMapping;
        }

        // 从Redis集群获取
        shortUrlMapping = getFromRedisCluster(shortCode);
        if (shortUrlMapping != null) {
            log.debug("Redis集群缓存命中: {}, 分片槽位: {}",
                    shortCode, shardingStrategyService.calculateSlot(shortCode));
            // 将Redis数据放入本地缓存
            localCacheService.putToLocalCache(shortCode, shortUrlMapping);
        }

        return shortUrlMapping;
    }

    /**
     * 将短链信息放入集群缓存（支持Hash Tag）
     */
    public void putToCache(String shortCode, ShortUrlMapping shortUrlMapping) {
        if (!StringUtils.hasText(shortCode) || shortUrlMapping == null) {
            log.warn("参数为空，跳过缓存操作: shortCode={}", shortCode);
            return;
        }

        // 放入本地缓存
        localCacheService.putToLocalCache(shortCode, shortUrlMapping);

        // 放入Redis集群缓存
        cacheToRedisCluster(shortCode, shortUrlMapping);

        log.debug("缓存短链信息到集群: {}, 分片槽位: {}",
                shortCode, shardingStrategyService.calculateSlot(shortCode));
    }

    /**
     * 批量获取短链信息（集群优化）
     */
    public Map<String, ShortUrlMapping> batchGetFromCache(List<String> shortCodes) {
        if (shortCodes == null || shortCodes.isEmpty()) {
            return Collections.emptyMap();
        }

        Map<String, ShortUrlMapping> result = new HashMap<>();
        List<String> missedCodes = new ArrayList<>();

        // 先从本地缓存批量获取
        for (String shortCode : shortCodes) {
            ShortUrlMapping cached = localCacheService.getFromLocalCache(shortCode);
            if (cached != null) {
                result.put(shortCode, cached);
            } else {
                missedCodes.add(shortCode);
            }
        }

        // 从Redis集群批量获取未命中的数据
        if (!missedCodes.isEmpty()) {
            Map<String, ShortUrlMapping> redisResult = batchGetFromRedisCluster(missedCodes);
            result.putAll(redisResult);

            // 将Redis数据放入本地缓存
            redisResult.forEach(localCacheService::putToLocalCache);
        }

        log.debug("批量获取缓存: 请求={}, 本地命中={}, Redis命中={}",
                shortCodes.size(), shortCodes.size() - missedCodes.size(),
                result.size() - (shortCodes.size() - missedCodes.size()));

        return result;
    }

    /**
     * 批量缓存短链信息（集群优化）
     */
    public void batchPutToCache(Map<String, ShortUrlMapping> mappings) {
        if (mappings == null || mappings.isEmpty()) {
            return;
        }

        // 批量放入本地缓存
        mappings.forEach(localCacheService::putToLocalCache);

        // 批量放入Redis集群缓存
        batchCacheToRedisCluster(mappings);

        log.debug("批量缓存到集群: 数量={}", mappings.size());
    }

    /**
     * 增加访问计数（集群分片优化）
     */
    public Long incrementAccessCount(String shortCode) {
        try {
            String key = generateHashTagKey(COUNT_CACHE_KEY, shortCode);
            RAtomicLong atomicLong = redissonClient.getAtomicLong(key);

            long count = atomicLong.incrementAndGet();
            atomicLong.expire(COUNT_EXPIRE_TIME);

            log.debug("访问计数增加: {}, 当前计数: {}, 分片槽位: {}",
                    shortCode, count, shardingStrategyService.calculateSlot(key));

            return count;
        } catch (Exception e) {
            log.error("增加访问计数失败: shortCode={}, error={}", shortCode, e.getMessage());
            return null;
        }
    }

    /**
     * 获取访问计数
     */
    public Long getAccessCount(String shortCode) {
        try {
            String key = generateHashTagKey(COUNT_CACHE_KEY, shortCode);
            RAtomicLong atomicLong = redissonClient.getAtomicLong(key);
            return atomicLong.get();
        } catch (Exception e) {
            log.error("获取访问计数失败: shortCode={}, error={}", shortCode, e.getMessage());
            return 0L;
        }
    }

    /**
     * URL哈希映射缓存
     */
    public void putUrlHashMapping(String originUrlHash, String shortCode) {
        try {
            String key = generateHashTagKey(HASH_MAPPING_KEY, originUrlHash);
            RBucket<String> bucket = redissonClient.getBucket(key);
            bucket.set(shortCode, DEFAULT_EXPIRE_TIME);

            log.debug("缓存URL哈希映射: hash={}, shortCode={}, 分片槽位: {}",
                    originUrlHash, shortCode, shardingStrategyService.calculateSlot(key));
        } catch (Exception e) {
            log.error("缓存URL哈希映射失败: hash={}, error={}", originUrlHash, e.getMessage());
        }
    }

    /**
     * 获取URL哈希映射
     */
    public String getShortCodeByUrlHash(String originUrlHash) {
        try {
            String key = generateHashTagKey(HASH_MAPPING_KEY, originUrlHash);
            RBucket<String> bucket = redissonClient.getBucket(key);
            return bucket.get();
        } catch (Exception e) {
            log.error("获取URL哈希映射失败: hash={}, error={}", originUrlHash, e.getMessage());
            return null;
        }
    }

    /**
     * 删除URL哈希映射
     */
    public void removeUrlHashMapping(String originUrlHash) {
        try {
            String key = generateHashTagKey(HASH_MAPPING_KEY, originUrlHash);
            RBucket<String> bucket = redissonClient.getBucket(key);
            bucket.delete();
        } catch (Exception e) {
            log.error("删除URL哈希映射失败: hash={}, error={}", originUrlHash, e.getMessage());
        }
    }

    /**
     * 布隆过滤器检查
     */
    public boolean existsInBloomFilter(String shortCode) {
        return tieredBloomFilterService.mightContain(shortCode);
    }

    /**
     * 添加到布隆过滤器
     */
    public void addToBloomFilter(String shortCode) {
        tieredBloomFilterService.put(shortCode);
    }

    /**
     * 清除缓存
     */
    public void evictCache(String shortCode) {
        // 清除本地缓存
        localCacheService.evictFromLocalCache(shortCode);

        // 清除Redis集群缓存
        try {
            String urlKey = generateHashTagKey(URL_CACHE_KEY, shortCode);
            String countKey = generateHashTagKey(COUNT_CACHE_KEY, shortCode);

            RBatch batch = redissonClient.createBatch();
            batch.getBucket(urlKey).deleteAsync();
            batch.getAtomicLong(countKey).deleteAsync();
            batch.execute();

            log.debug("清除集群缓存: {}", shortCode);
        } catch (Exception e) {
            log.error("清除集群缓存失败: shortCode={}, error={}", shortCode, e.getMessage());
        }
    }

    // ==================== 私有方法 ====================

    /**
     * 从Redis集群获取数据
     */
    private ShortUrlMapping getFromRedisCluster(String shortCode) {
        try {
            String key = generateHashTagKey(URL_CACHE_KEY, shortCode);
            RBucket<String> bucket = redissonClient.getBucket(key);
            String json = bucket.get();

            if (json != null) {
                return objectMapper.readValue(json, ShortUrlMapping.class);
            }
        } catch (JsonProcessingException e) {
            log.error("Redis集群反序列化失败: shortCode={}, error={}", shortCode, e.getMessage());
        } catch (Exception e) {
            log.error("Redis集群查询失败: shortCode={}, error={}", shortCode, e.getMessage());
        }
        return null;
    }

    /**
     * 缓存到Redis集群
     */
    private void cacheToRedisCluster(String shortCode, ShortUrlMapping shortUrlMapping) {
        try {
            String key = generateHashTagKey(URL_CACHE_KEY, shortCode);
            String json = objectMapper.writeValueAsString(shortUrlMapping);

            RBucket<String> bucket = redissonClient.getBucket(key);
            Duration expireTime = isHotData(shortUrlMapping) ? HOT_DATA_EXPIRE_TIME : DEFAULT_EXPIRE_TIME;
            bucket.set(json, expireTime);

            log.debug("Redis集群缓存成功: {}", shortCode);
        } catch (JsonProcessingException e) {
            log.error("Redis集群序列化失败: shortCode={}, error={}", shortCode, e.getMessage());
        } catch (Exception e) {
            log.error("Redis集群缓存失败: shortCode={}, error={}", shortCode, e.getMessage());
        }
    }

    /**
     * 批量从Redis集群获取数据
     */
    private Map<String, ShortUrlMapping> batchGetFromRedisCluster(List<String> shortCodes) {
        Map<String, ShortUrlMapping> result = new HashMap<>();

        try {
            // 按分片分组批量操作
            Map<Integer, List<String>> shardGroups = shortCodes.stream()
                    .collect(Collectors.groupingBy(code ->
                            shardingStrategyService.calculateSlot(generateHashTagKey(URL_CACHE_KEY, code))));

            List<CompletableFuture<Void>> futures = new ArrayList<>();

            for (Map.Entry<Integer, List<String>> entry : shardGroups.entrySet()) {
                CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
                    RBatch batch = redissonClient.createBatch();
                    Map<String, RBucketAsync<String>> buckets = new HashMap<>();

                    for (String shortCode : entry.getValue()) {
                        String key = generateHashTagKey(URL_CACHE_KEY, shortCode);
                        buckets.put(shortCode, batch.getBucket(key));
                    }

                    BatchResult<?> batchResult = batch.execute();

                    for (Map.Entry<String, RBucketAsync<String>> bucketEntry : buckets.entrySet()) {
                        try {
                            String json = (String) batchResult.getResponses().get(
                                    new ArrayList<>(buckets.values()).indexOf(bucketEntry.getValue()));
                            if (json != null) {
                                ShortUrlMapping mapping = objectMapper.readValue(json, ShortUrlMapping.class);
                                synchronized (result) {
                                    result.put(bucketEntry.getKey(), mapping);
                                }
                            }
                        } catch (Exception e) {
                            log.error("批量获取解析失败: shortCode={}, error={}",
                                    bucketEntry.getKey(), e.getMessage());
                        }
                    }
                });
                futures.add(future);
            }

            // 等待所有批量操作完成
            CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                    .get(5, TimeUnit.SECONDS);

        } catch (Exception e) {
            log.error("批量从Redis集群获取失败: error={}", e.getMessage());
        }

        return result;
    }

    /**
     * 批量缓存到Redis集群
     */
    private void batchCacheToRedisCluster(Map<String, ShortUrlMapping> mappings) {
        try {
            // 按分片分组批量操作
            Map<Integer, Map<String, ShortUrlMapping>> shardGroups = new HashMap<>();

            for (Map.Entry<String, ShortUrlMapping> entry : mappings.entrySet()) {
                String key = generateHashTagKey(URL_CACHE_KEY, entry.getKey());
                int slot = shardingStrategyService.calculateSlot(key);
                shardGroups.computeIfAbsent(slot, k -> new HashMap<>())
                        .put(entry.getKey(), entry.getValue());
            }

            List<CompletableFuture<Void>> futures = new ArrayList<>();

            for (Map<String, ShortUrlMapping> shardMappings : shardGroups.values()) {
                CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
                    RBatch batch = redissonClient.createBatch();

                    for (Map.Entry<String, ShortUrlMapping> entry : shardMappings.entrySet()) {
                        try {
                            String key = generateHashTagKey(URL_CACHE_KEY, entry.getKey());
                            String json = objectMapper.writeValueAsString(entry.getValue());
                            Duration expireTime = isHotData(entry.getValue()) ?
                                    HOT_DATA_EXPIRE_TIME : DEFAULT_EXPIRE_TIME;
                            batch.getBucket(key).setAsync(json, expireTime);
                        } catch (Exception e) {
                            log.error("批量缓存序列化失败: shortCode={}, error={}",
                                    entry.getKey(), e.getMessage());
                        }
                    }

                    batch.execute();
                });
                futures.add(future);
            }

            // 等待所有批量操作完成
            CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                    .get(5, TimeUnit.SECONDS);

        } catch (Exception e) {
            log.error("批量缓存到Redis集群失败: error={}", e.getMessage());
        }
    }

    /**
     * 生成Hash Tag键（确保相关数据在同一分片）
     */
    private String generateHashTagKey(String prefix, String shortCode) {
        if (enableHashTag) {
            return prefix + "{" + shortCode + "}";
        }
        return prefix + shortCode;
    }

    /**
     * 判断是否为热点数据
     */
    private boolean isHotData(ShortUrlMapping shortUrlMapping) {
        return shortUrlMapping.getAccessCount() != null && shortUrlMapping.getAccessCount() > 1000;
    }
}
