package cn.yogehaoren.badfox.def;

import cn.yogehaoren.badfox.annotation.Queue;
import cn.yogehaoren.badfox.core.BadFoxQueue;
import cn.yogehaoren.badfox.exception.BadFoxInitExcepiton;
import cn.yogehaoren.badfox.spring.boot.BadFoxProperties;
import cn.yogehaoren.badfox.spring.common.BadFoxCache;
import cn.yogehaoren.badfox.struct.Request;
import cn.yogehaoren.badfox.utils.GenericUtils;
import org.apache.commons.codec.digest.DigestUtils;
import org.redisson.api.RBlockingQueue;
import org.redisson.api.RBloomFilter;
import org.redisson.api.RedissonClient;
import org.redisson.client.codec.StringCodec;
import org.redisson.codec.FstCodec;
import org.redisson.codec.JsonJacksonCodec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Redisson-backed implementation of {@link BadFoxQueue} running in "mix" mode:
 * all crawlers share a single Redis blocking queue for pending requests, while
 * duplicate detection uses one Redis bloom filter per crawler name.
 *
 * <p>Thread-safety: lazy creation of the shared queue and of per-crawler bloom
 * filters is guarded by synchronized accessors ({@link #getQueue} /
 * {@link #getFilter}).
 */
@Queue(mix = true)
public class DefaultRedixMixQueue implements BadFoxQueue {

    // Redis key of the shared request queue. NOTE(review): the trailing "_"
    // suggests a crawler name was once meant to be appended; in mix mode the
    // queue is intentionally shared, so the prefix alone is used as the key —
    // confirm this is the intended Redis key.
    private static final String QUEUE_NAME_PREFIX = "BADFOX_CRAWLER_QUEUE_MIX_";
    // Redis key prefix for per-crawler bloom filters (crawler name appended).
    private static final String SET_NAME_PREFIX = "BADFOX_CRAWLER_SET_MIX_";

    private final Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Defaults sized for an expected memory footprint of roughly 100&nbsp;MB;
     * adjust to your workload. See
     * <a href="https://hur.st/bloomfilter/?n=100M&p=0.01&m=&k=">bloomfilter</a>
     * for sizing guidance.
     */
    private long expectedInsertions = 1_0000_0000L;
    private double falseProbability = 0.01;

    // Lazily created shared queue; guarded by the synchronized getQueue().
    private RBlockingQueue<Request> queue = null;
    // Per-crawler bloom filter cache; guarded by the synchronized getFilter().
    private final Map<String, RBloomFilter<String>> bloomFilterCache = new HashMap<>();

    @Autowired(required = false)
    private BadFoxProperties crawlerProperties;
    @Autowired(required = false)
    private RedissonClient redisson;

    /**
     * Validates the Redisson configuration after dependency injection and
     * applies bloom filter sizing overrides from {@link BadFoxProperties}.
     *
     * @throws BadFoxInitExcepiton if the Redisson queue is enabled but no
     *                             {@link RedissonClient} bean is available
     */
    @PostConstruct
    public void init() {
        boolean isEnableRedissonQueue = false;

        if (crawlerProperties != null && crawlerProperties.isEnableRedissonQueue()) {
            // Spring Boot configuration path: honor property overrides.
            isEnableRedissonQueue = true;
            if (crawlerProperties.getBloomFilterExpectedInsertions() > 0) {
                expectedInsertions = crawlerProperties.getBloomFilterExpectedInsertions();
            }
            if (crawlerProperties.getBloomFilterFalseProbability() > 0) {
                falseProbability = crawlerProperties.getBloomFilterFalseProbability();
            }
        }

        if (isEnableRedissonQueue && redisson == null) {
            // Was logger.error("") — log the actual problem before failing fast.
            logger.error("Redisson queue is enabled but no RedissonClient bean was found");
            throw new BadFoxInitExcepiton(" 检测到启用了的 Redison queue，但是没有发现Redisson配置，请参考：https://github.com/redisson/redisson/wiki/2.-%E9%85%8D%E7%BD%AE%E6%96%B9%E6%B3%95");
        }
        logger.info("springboot={},isEnableRedissonQueue = {},redisson = {}", BadFoxCache.isSpringBoot(), isEnableRedissonQueue, redisson);
    }

    /**
     * Blocking pop with a timeout.
     *
     * @return the next request, or {@code null} on timeout or error
     */
    @Override
    public Request bPop(String crawlerName, long time, TimeUnit timeUnit) {
        Request request = null;
        try {
            RBlockingQueue<Request> rBlockingQueue = getQueue(crawlerName);
            request = rBlockingQueue.poll(time, timeUnit);
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
        return request;
    }

    /**
     * Blocking pop without a timeout (waits until an element is available).
     *
     * @return the next request, or {@code null} on error/interruption
     */
    @Override
    public Request bPop(String crawlerName) {
        Request request = null;
        try {
            RBlockingQueue<Request> rBlockingQueue = getQueue(crawlerName);
            request = rBlockingQueue.take();
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
        return request;
    }

    /**
     * Enqueues a request onto the shared queue.
     *
     * @return {@code true} if the request was enqueued, {@code false} on error
     */
    @Override
    public boolean push(Request req) {
        try {
            RBlockingQueue<Request> rBlockingQueue = getQueue(req.getCrawlerName());
            rBlockingQueue.put(req);
            return true;
        } catch (Exception e) {
            // Keep the stack trace; message-only logging hid the failure cause.
            logger.warn(e.getMessage(), e);
        }
        return false;
    }

    /**
     * @return the current queue length, or 0 on error
     */
    @Override
    public long len(String crawlerName) {
        long len = 0;
        try {
            RBlockingQueue<Request> rBlockingQueue = getQueue(crawlerName);
            len = rBlockingQueue.size();
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
        return len;
    }

    /**
     * Checks whether a request has already been processed, via the
     * per-crawler bloom filter (subject to the configured false-positive rate).
     */
    @Override
    public boolean isProcessed(Request req) {
        boolean res = false;
        try {
            String sign = GenericUtils.signRequest(req);
            RBloomFilter<String> bloomFilter = getFilter(req.getCrawlerName());
            res = bloomFilter.contains(sign);
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
        return res;
    }

    /**
     * Marks a request as processed in the per-crawler bloom filter.
     */
    @Override
    public void addProcessed(Request req) {
        try {
            // BUGFIX: previously inserted DigestUtils.md5Hex(req.getUrl()),
            // while isProcessed() checked GenericUtils.signRequest(req) — the
            // two signatures never matched, so deduplication was broken. Both
            // sides now use the same request signature.
            String sign = GenericUtils.signRequest(req);
            RBloomFilter<String> bloomFilter = getFilter(req.getCrawlerName());
            bloomFilter.add(sign);
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
    }

    /**
     * @return the approximate number of processed requests for the crawler
     *         (bloom filter cardinality estimate), or 0 on error
     */
    @Override
    public long totalCrawled(String crawlerName) {
        long count = 0;
        try {
            RBloomFilter<String> bloomFilter = getFilter(crawlerName);
            count = bloomFilter.count();
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
        return count;
    }

    /**
     * Clears the crawl (dedup) record by deleting the crawler's bloom filter.
     */
    @Override
    public void clearRecord(String crawlerName) {
        try {
            RBloomFilter<String> bloomFilter = getFilter(crawlerName);
            bloomFilter.delete();
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
    }

    /**
     * Lazily creates the single shared blocking queue. The {@code crawlerName}
     * parameter is intentionally unused in mix mode — all crawlers share one
     * queue keyed by {@link #QUEUE_NAME_PREFIX}.
     */
    private synchronized RBlockingQueue<Request> getQueue(String crawlerName) {
        if (queue == null) {
            queue = redisson.getBlockingQueue(QUEUE_NAME_PREFIX, new JsonJacksonCodec());
        }
        return queue;
    }

    /**
     * Lazily creates and caches the per-crawler bloom filter. Synchronized:
     * the backing HashMap was previously mutated without any guard, which is
     * a data race when multiple crawler threads touch the cache concurrently.
     */
    private synchronized RBloomFilter<String> getFilter(String crawlerName) {
        RBloomFilter<String> bloomFilter = bloomFilterCache.get(crawlerName);
        if (bloomFilter == null) {
            bloomFilter = redisson.getBloomFilter(SET_NAME_PREFIX + crawlerName, new StringCodec());
            bloomFilter.tryInit(expectedInsertions, falseProbability);
            bloomFilterCache.put(crawlerName, bloomFilter);
        }
        return bloomFilter;
    }
}
