package org.hhf.core;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * HashRedis类实现了Redis的哈希数据类型操作
 */
/**
 * HashRedis implements the Redis hash data type on top of a {@link RedisCore}
 * key/value store.
 *
 * <p><b>Thread-safety:</b> a single {@link ReentrantReadWriteLock} guards every
 * operation, so multi-step sequences (e.g. the read-modify-write in
 * {@link #hincrby}) are atomic with respect to all other methods of this
 * instance. Read-only commands never mutate the underlying store: a query on a
 * missing key no longer materializes an empty hash (which the previous
 * implementation did — a write performed under the read lock).
 */
public class HashRedis {
    private final RedisCore core;
    // Write lock for mutating commands, read lock for pure queries.
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    public HashRedis(RedisCore core) {
        this.core = core;
    }

    /**
     * Looks up the hash stored at {@code key} WITHOUT creating one.
     *
     * @return the hash map, or {@code null} when the key is absent or holds a
     *         value of a different type
     */
    @SuppressWarnings("unchecked")
    private ConcurrentHashMap<String, String> getHash(String key) {
        Object value = core.get(key);
        // instanceof is false for null, so no separate null check is needed.
        return (value instanceof ConcurrentHashMap)
                ? (ConcurrentHashMap<String, String>) value
                : null;
    }

    /**
     * Looks up the hash stored at {@code key}, creating and storing an empty
     * one when the key is absent (a value of a different type is silently
     * replaced, matching the previous behavior).
     *
     * <p>Must only be called while holding the write lock, because it may
     * mutate the underlying store.
     */
    private ConcurrentHashMap<String, String> getOrCreateHash(String key) {
        ConcurrentHashMap<String, String> hash = getHash(key);
        if (hash == null) {
            hash = new ConcurrentHashMap<>();
            core.put(key, hash, RedisCore.DataType.HASH);
        }
        return hash;
    }

    /**
     * Sets the value of a hash field, creating the hash if necessary.
     *
     * @return 1 if the field is new, 0 if an existing field was overwritten
     */
    public Long hset(String key, String field, String value) {
        lock.writeLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getOrCreateHash(key);
            // put() returns the previous value, so one call both stores and
            // tells us whether the field existed.
            return hash.put(field, value) == null ? 1L : 0L;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Returns the value of a hash field.
     *
     * @return the value, or {@code null} when the key or field is absent
     */
    public String hget(String key, String field) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            return hash == null ? null : hash.get(field);
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Returns the values of several hash fields in one call.
     *
     * @return a list the same length as {@code fields}; missing fields (or a
     *         missing key) yield {@code null} entries
     */
    public List<String> hmget(String key, String... fields) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            List<String> values = new ArrayList<>(fields.length);
            for (String field : fields) {
                values.add(hash == null ? null : hash.get(field));
            }
            return values;
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Increments the numeric value of a hash field, treating a missing field
     * as 0 and creating the hash if necessary.
     *
     * @return the new value, or {@code null} when the existing value is not a
     *         valid long (Redis would raise an error here; {@code null} is
     *         kept for backward compatibility with existing callers)
     */
    public Long hincrby(String key, String field, long increment) {
        lock.writeLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getOrCreateHash(key);
            String value = hash.get(field);
            long num = 0;
            if (value != null) {
                try {
                    num = Long.parseLong(value);
                } catch (NumberFormatException e) {
                    return null;
                }
            }
            num += increment;
            hash.put(field, String.valueOf(num));
            return num;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Returns all field names of the hash.
     *
     * @return a defensive copy of the key set; empty when the key is absent
     */
    public Set<String> hkeys(String key) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            return hash == null ? new HashSet<>() : new HashSet<>(hash.keySet());
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Returns all values of the hash.
     *
     * @return a defensive copy of the values; empty when the key is absent
     */
    public List<String> hvals(String key) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            return hash == null ? new ArrayList<>() : new ArrayList<>(hash.values());
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Returns all field/value pairs of the hash.
     *
     * @return a defensive copy of the hash; empty when the key is absent
     */
    public Map<String, String> hgetall(String key) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            return hash == null ? new HashMap<>() : new HashMap<>(hash);
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Tests whether a hash field exists.
     *
     * @return {@code true} only when both the key and the field exist
     */
    public Boolean hexists(String key, String field) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            return hash != null && hash.containsKey(field);
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Deletes one or more hash fields. A missing key deletes nothing — the
     * hash is no longer created just to remove fields from it.
     *
     * @return the number of fields actually removed
     */
    public Long hdel(String key, String... fields) {
        lock.writeLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            if (hash == null) {
                return 0L;
            }
            long deleted = 0;
            for (String field : fields) {
                if (hash.remove(field) != null) {
                    deleted++;
                }
            }
            return deleted;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Returns the number of fields in the hash.
     *
     * @return the field count, or 0 when the key is absent
     */
    public Long hlen(String key) {
        lock.readLock().lock();
        try {
            ConcurrentHashMap<String, String> hash = getHash(key);
            return hash == null ? 0L : (long) hash.size();
        } finally {
            lock.readLock().unlock();
        }
    }
}