package com.itheima.service.impl;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

import org.redisson.Redisson;
import org.redisson.api.RLock;
import org.redisson.api.RReadWriteLock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import com.itheima.entity.Product;
import com.itheima.mapper.ProductMapper;
import com.itheima.utils.RedisUtils;

@Service
public class ProductService {

    @Autowired
    private Redisson redisson;

    @Autowired
    private ProductMapper productMapper;

    @Autowired
    private RedisUtils redisUtils;

    /** Redis key prefix for a single cached product. */
    private final String PRODUCT_CACHE_KEY = "product_cache_key:";

    /** Prefix of the distributed mutex guarding hot-key cache rebuild. */
    private final String PRODUCT_CACHE_LOCK_PREFIX = "product_cache_lock:";

    // JVM-local cache layer. Fix: this map is read and written by many request
    // threads concurrently; a plain HashMap is not thread-safe and can corrupt
    // its internal table (or spin forever) under concurrent puts, so it must
    // be a ConcurrentHashMap.
    private final Map<Long, Product> productMap = new ConcurrentHashMap<>();

    /** Prefix of the read-write lock keeping DB and cache writes consistent. */
    private final String PRODUCT_CACHE_LOCK_UPDATE = "product_cache_lock_update:";

    /** Sentinel value cached for ids that do not exist (penetration guard). */
    private final String EMPTY_CACHE = "{}";

    // NOTE(review): 60 * 24 * 3 = 4320 and is used with TimeUnit.SECONDS,
    // i.e. 72 minutes. If "3 days" was intended this should be 60 * 60 * 24 * 3
    // — confirm the intended unit.
    private final Integer PRODUCT_CACHE_TIMEOUT = 60 * 24 * 3;

    /**
     * Bloom filter that rejects lookups for ids never created, to block
     * cache-penetration attacks before they reach Redis or the database.
     * 1000  : expected number of inserted keys
     * 0.001 : acceptable false-positive rate
     *
     * NOTE(review): the filter lives only in this JVM and is not warmed from
     * the DB on startup, so after a restart every pre-existing id is rejected
     * until it is re-created — confirm whether a warm-up pass is needed.
     */
    BloomFilter<String> filter = BloomFilter.create(Funnels.stringFunnel(StandardCharsets.UTF_8), 1000, 0.001);

    /**
     * Creates a product, then records it in the bloom filter and both cache
     * layers.
     *
     * Fix: the original wrote the caches BEFORE the DB insert, so (a) a
     * rolled-back transaction left a phantom entry in cache, and (b) with
     * mapper-generated keys {@code product.getId()} could still be null when
     * used as the cache key. The DB insert now happens first.
     * Fix: the original never called {@code filter.put(...)}, which made
     * {@link #get(Long)} reject every id as "not present".
     */
    @Transactional
    public Product create(Product product) {
        Product created = productMapper.create(product);
        filter.put(product.getId() + "");
        productMap.put(product.getId(), product);
        redisUtils.set(PRODUCT_CACHE_KEY + product.getId(), product, getProductCacheTime(), TimeUnit.SECONDS);
        return created;
    }

    /**
     * Updates a product under the distributed WRITE lock so that a concurrent
     * cache rebuild (which takes the read side in
     * {@link #getFromMySqlAndWriteCache(Long)}) cannot interleave a stale DB
     * read between our DB write and our cache write.
     */
    @Transactional
    public Product update(Product product) {
        RReadWriteLock readWriteLock = redisson.getReadWriteLock(PRODUCT_CACHE_LOCK_UPDATE + product.getId());
        RLock writeLock = readWriteLock.writeLock();
        writeLock.lock(); // once acquired, readers block until this write lock is released
        Product updated;
        try {
            updated = productMapper.update(product);
            productMap.put(product.getId(), product);
            redisUtils.set(PRODUCT_CACHE_KEY + product.getId(), product, getProductCacheTime(), TimeUnit.SECONDS);
        } finally {
            writeLock.unlock();
        }
        return updated;
    }

    /**
     * Reads a product with layered protection for hot keys:
     * bloom filter (penetration) -> JVM cache -> Redis cache ->
     * distributed lock + double-checked lookup -> database.
     */
    public Product get(Long productId) {

        // Ids never inserted into the bloom filter cannot exist; refuse them
        // outright instead of letting an attacker hammer the database.
        if (!filter.mightContain(productId + "")) {
            return new Product();
        }

        // JVM-local cache hit: cheapest path, no network round trip.
        Product local = productMap.get(productId);
        if (local != null) {
            return local;
        }

        String productCacheKey = PRODUCT_CACHE_KEY + productId;
        Product product = getFromCache(productCacheKey);
        if (product != null) {
            return product;
        }

        // Hot-key rebuild guard: only one thread per id may query the DB and
        // write the cache; the rest wait here.
        RLock lock = redisson.getLock(PRODUCT_CACHE_LOCK_PREFIX + productId);
        lock.lock();
        try {
            // Double-checked lookup: the lock winner has already written the
            // cache by the time a waiter reaches this line, so waiters are
            // served from cache without touching the database.
            product = getFromCache(productCacheKey);
            if (product != null) {
                return product;
            }
            product = getFromMySqlAndWriteCache(productId);
        } finally {
            lock.unlock();
        }
        return product;
    }


    /**
     * Loads the product from MySQL and writes it to Redis while holding the
     * READ side of the product's read-write lock.
     *
     * Why a read lock? Between (a) the DB read and (b) the cache write, a
     * concurrent {@link #update(Product)} could change the row; caching the
     * pre-update value would leave DB and cache permanently inconsistent.
     * update() holds the WRITE lock across its own DB+cache writes, so:
     *  - if a writer holds the lock, we block until its cache write finishes;
     *  - concurrent readers do not block each other.
     */
    private Product getFromMySqlAndWriteCache(Long productId) {
        RReadWriteLock readWriteLock = redisson.getReadWriteLock(PRODUCT_CACHE_LOCK_UPDATE + productId);
        RLock readLock = readWriteLock.readLock();
        readLock.lock();
        Product product;
        try {
            product = productMapper.get(productId);
            if (product != null) {
                redisUtils.set(PRODUCT_CACHE_KEY + productId, product, getProductCacheTime(), TimeUnit.SECONDS);
            } else {
                // Cache an empty sentinel with a short TTL so repeated lookups
                // of a missing id are absorbed without reaching the database.
                redisUtils.set(PRODUCT_CACHE_KEY + productId, EMPTY_CACHE, 30, TimeUnit.SECONDS);
            }
        } finally {
            readLock.unlock();
        }
        return product;
    }

    /**
     * Returns the base TTL plus a small random jitter so keys written in the
     * same burst do not all expire at the same instant (avalanche mitigation).
     *
     * Fix: uses {@link ThreadLocalRandom} instead of allocating a new
     * {@code Random} on every call.
     * NOTE(review): a 0-4 second jitter is tiny relative to the base TTL —
     * confirm whether a larger spread was intended.
     */
    public Integer getProductCacheTime() {
        return PRODUCT_CACHE_TIMEOUT + ThreadLocalRandom.current().nextInt(5);
    }

    /**
     * Reads a product from Redis, renewing its TTL on every hit.
     *
     * @return an empty {@code Product} for the cached "not found" sentinel,
     *         the cached product on a hit, or {@code null} on a miss.
     */
    public Product getFromCache(String productCacheKey) {
        Product product = null;
        Object o = redisUtils.get(productCacheKey, Product.class);
        if (o != null) {
            if (EMPTY_CACHE.equals(o)) {
                // Renew the sentinel so a hammered missing key keeps being
                // answered from cache instead of expiring back to the DB.
                redisUtils.expire(productCacheKey, 30, TimeUnit.SECONDS);
                return new Product();
            }
            product = (Product) o;
            // Read-renewal keeps hot keys alive past the base TTL.
            redisUtils.expire(productCacheKey, getProductCacheTime(), TimeUnit.SECONDS);
        }
        return product;
    }
}
