package com.cui.project.utils;

import lombok.extern.slf4j.Slf4j;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.RedisSerializer;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.ToIntFunction;

/**
 * Redis batch-write utility.
 *
 * <p>All bulk operations go through a Redis pipeline ({@code executePipelined}),
 * which queues every command and flushes them in a single network round trip —
 * far cheaper than one round trip per key for large batches.
 *
 * <p>Counting note: each method returns the size of the pipelined-result list,
 * i.e. the number of commands actually issued (entries whose key or value
 * serialized to {@code null} are skipped and not counted). For
 * {@link #batchDelete} this is the number of DEL commands sent, which may
 * exceed the number of keys that actually existed.
 *
 * @author: 崔老爷
 */
@Component
@Slf4j
public class RedisBatchWriteUtil {

    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Key serializer of the injected template. The template is declared with
     * {@code String} keys, so the unchecked cast is safe by construction.
     */
    @SuppressWarnings("unchecked")
    private RedisSerializer<String> keySerializer() {
        return (RedisSerializer<String>) redisTemplate.getKeySerializer();
    }

    /**
     * Value serializer of the injected template. The template is declared with
     * {@code Object} values, so the unchecked cast is safe by construction.
     */
    @SuppressWarnings("unchecked")
    private RedisSerializer<Object> valueSerializer() {
        return (RedisSerializer<Object>) redisTemplate.getValueSerializer();
    }

    /**
     * Batch-sets String-type key/value pairs via a pipeline.
     *
     * @param dataMap key/value pairs to write; {@code null} or empty is a no-op
     * @return number of SET commands issued (0 for empty input)
     */
    public int batchSet(Map<String, Object> dataMap) {
        if (dataMap == null || dataMap.isEmpty()) {
            log.warn("批量写入数据为空");
            return 0;
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();
            RedisSerializer<Object> valueSer = valueSerializer();

            for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
                byte[] key = keySer.serialize(entry.getKey());
                byte[] value = valueSer.serialize(entry.getValue());
                // Skip entries the serializer maps to null — issuing them would fail.
                if (key != null && value != null) {
                    connection.set(key, value);
                }
            }
            return null;
        }).size();
    }

    /**
     * Batch-sets String-type key/value pairs with an expiration via a pipeline.
     *
     * <p>Uses PSETEX (millisecond precision) rather than SETEX: converting to
     * whole seconds would truncate any sub-second timeout to 0, which Redis
     * rejects for every entry.
     *
     * @param dataMap  key/value pairs to write; {@code null} or empty is a no-op
     * @param timeout  expiration amount; must be positive
     * @param timeUnit unit of {@code timeout}
     * @return number of PSETEX commands issued (0 for empty input)
     * @throws IllegalArgumentException if the timeout is not strictly positive
     */
    public int batchSetWithExpire(Map<String, Object> dataMap, long timeout, TimeUnit timeUnit) {
        if (dataMap == null || dataMap.isEmpty()) {
            log.warn("批量写入数据为空");
            return 0;
        }

        long millis = timeUnit.toMillis(timeout);
        if (millis <= 0) {
            throw new IllegalArgumentException(
                    "timeout must be positive, got: " + timeout + " " + timeUnit);
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();
            RedisSerializer<Object> valueSer = valueSerializer();

            for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
                byte[] key = keySer.serialize(entry.getKey());
                byte[] value = valueSer.serialize(entry.getValue());
                if (key != null && value != null) {
                    connection.pSetEx(key, millis, value);
                }
            }
            return null;
        }).size();
    }

    /**
     * Batch-deletes keys via a pipeline.
     *
     * @param keys keys to delete; {@code null} or empty is a no-op
     * @return number of DEL commands issued (not the number of keys that existed)
     */
    public int batchDelete(List<String> keys) {
        if (keys == null || keys.isEmpty()) {
            log.warn("批量删除的键列表为空");
            return 0;
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();

            for (String key : keys) {
                byte[] keyBytes = keySer.serialize(key);
                if (keyBytes != null) {
                    connection.del(keyBytes);
                }
            }
            return null;
        }).size();
    }

    /**
     * Batch-sets fields of a single Hash via a pipeline.
     *
     * @param key           the Hash key
     * @param fieldValueMap field/value pairs; {@code null} or empty is a no-op
     * @return number of HSET commands issued (0 for empty input or a null-serialized key)
     */
    public int batchHashSet(String key, Map<String, Object> fieldValueMap) {
        if (fieldValueMap == null || fieldValueMap.isEmpty()) {
            log.warn("批量写入Hash数据为空");
            return 0;
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();
            RedisSerializer<Object> valueSer = valueSerializer();

            byte[] keyBytes = keySer.serialize(key);
            if (keyBytes == null) {
                // Key serialized to null: no command can be issued for any field.
                return null;
            }

            for (Map.Entry<String, Object> entry : fieldValueMap.entrySet()) {
                byte[] field = keySer.serialize(entry.getKey());
                byte[] value = valueSer.serialize(entry.getValue());
                if (field != null && value != null) {
                    connection.hSet(keyBytes, field, value);
                }
            }
            return null;
        }).size();
    }

    /**
     * Batch-pushes elements to the right end of a List via a pipeline.
     *
     * @param key    the List key
     * @param values values to push, in order; {@code null} or empty is a no-op
     * @return number of RPUSH commands issued (0 for empty input or a null-serialized key)
     */
    public int batchListRightPush(String key, List<Object> values) {
        if (values == null || values.isEmpty()) {
            log.warn("批量写入List数据为空");
            return 0;
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();
            RedisSerializer<Object> valueSer = valueSerializer();

            byte[] keyBytes = keySer.serialize(key);
            if (keyBytes == null) {
                return null;
            }

            for (Object value : values) {
                byte[] valueBytes = valueSer.serialize(value);
                if (valueBytes != null) {
                    connection.rPush(keyBytes, valueBytes);
                }
            }
            return null;
        }).size();
    }

    /**
     * Batch-adds members to a Set via a pipeline.
     *
     * @param key     the Set key
     * @param members members to add; {@code null} or empty is a no-op
     * @return number of SADD commands issued (duplicates are still counted as commands)
     */
    public int batchSetAdd(String key, List<Object> members) {
        if (members == null || members.isEmpty()) {
            log.warn("批量写入Set数据为空");
            return 0;
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();
            RedisSerializer<Object> valueSer = valueSerializer();

            byte[] keyBytes = keySer.serialize(key);
            if (keyBytes == null) {
                return null;
            }

            for (Object member : members) {
                byte[] memberBytes = valueSer.serialize(member);
                if (memberBytes != null) {
                    connection.sAdd(keyBytes, memberBytes);
                }
            }
            return null;
        }).size();
    }

    /**
     * Batch-adds scored members to a sorted set (ZSet) via a pipeline.
     *
     * @param key          the ZSet key
     * @param scoreMembers member-to-score map; {@code null} or empty is a no-op
     * @return number of ZADD commands issued (0 for empty input or a null-serialized key)
     */
    public int batchZSetAdd(String key, Map<Object, Double> scoreMembers) {
        if (scoreMembers == null || scoreMembers.isEmpty()) {
            log.warn("批量写入ZSet数据为空");
            return 0;
        }

        return redisTemplate.executePipelined((RedisCallback<Object>) connection -> {
            RedisSerializer<String> keySer = keySerializer();
            RedisSerializer<Object> valueSer = valueSerializer();

            byte[] keyBytes = keySer.serialize(key);
            if (keyBytes == null) {
                return null;
            }

            for (Map.Entry<Object, Double> entry : scoreMembers.entrySet()) {
                byte[] memberBytes = valueSer.serialize(entry.getKey());
                if (memberBytes != null) {
                    connection.zAdd(keyBytes, entry.getValue(), memberBytes);
                }
            }
            return null;
        }).size();
    }

    /**
     * Writes a large map in fixed-size pipelined chunks (for very large volumes,
     * keeping each pipeline's memory footprint bounded).
     *
     * @param dataMap   key/value pairs to write; {@code null} or empty is a no-op
     * @param batchSize number of entries per pipeline flush; must be positive
     * @return total number of SET commands issued
     * @throws IllegalArgumentException if {@code batchSize <= 0}
     */
    public long batchSetInBatches(Map<String, Object> dataMap, int batchSize) {
        if (dataMap == null || dataMap.isEmpty()) {
            log.warn("批量写入数据为空");
            return 0;
        }

        long totalCount = writeInBatches(dataMap, batchSize, this::batchSet);
        log.info("分批批量写入完成，总共写入 {} 条数据", totalCount);
        return totalCount;
    }

    /**
     * Writes a large map in fixed-size pipelined chunks, applying the same
     * expiration to every entry.
     *
     * @param dataMap   key/value pairs to write; {@code null} or empty is a no-op
     * @param batchSize number of entries per pipeline flush; must be positive
     * @param timeout   expiration amount; must be positive
     * @param timeUnit  unit of {@code timeout}
     * @return total number of PSETEX commands issued
     * @throws IllegalArgumentException if {@code batchSize <= 0} or the timeout
     *                                  is not strictly positive
     */
    public Long batchSetWithExpireInBatches(Map<String, Object> dataMap, int batchSize,
                                            long timeout, TimeUnit timeUnit) {
        if (dataMap == null || dataMap.isEmpty()) {
            log.warn("批量写入数据为空");
            return 0L;
        }

        long totalCount =
                writeInBatches(dataMap, batchSize, batch -> batchSetWithExpire(batch, timeout, timeUnit));
        log.info("分批批量写入（带过期）完成，总共写入 {} 条数据", totalCount);
        return totalCount;
    }

    /**
     * Shared chunking loop for the {@code *InBatches} methods: accumulates
     * entries into a reusable buffer and hands each full (or final partial)
     * chunk to {@code batchWriter}, summing the per-chunk counts.
     *
     * @param dataMap     source entries (assumed non-empty; callers guard)
     * @param batchSize   entries per chunk; must be positive
     * @param batchWriter writes one chunk and returns its command count
     * @return total command count across all chunks
     * @throws IllegalArgumentException if {@code batchSize <= 0}
     */
    private long writeInBatches(Map<String, Object> dataMap, int batchSize,
                                ToIntFunction<Map<String, Object>> batchWriter) {
        if (batchSize <= 0) {
            throw new IllegalArgumentException("batchSize must be positive, got: " + batchSize);
        }

        long total = 0;
        Map<String, Object> batch = new HashMap<>(batchSize);

        for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
            batch.put(entry.getKey(), entry.getValue());
            // Source keys are unique and the buffer is cleared after each flush,
            // so batch.size() tracks the per-chunk entry count exactly.
            if (batch.size() >= batchSize) {
                total += batchWriter.applyAsInt(batch);
                batch.clear();
            }
        }

        // Flush the final partial chunk, if any.
        if (!batch.isEmpty()) {
            total += batchWriter.applyAsInt(batch);
        }
        return total;
    }
}