package cn.yogehaoren.badfox.def;

import cn.yogehaoren.badfox.annotation.Queue;
import cn.yogehaoren.badfox.core.BadFoxQueue;
import cn.yogehaoren.badfox.struct.Request;
import cn.yogehaoren.badfox.utils.GenericUtils;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @author WangNing
 * date  2022/3/9 15:15
 * @version 1.0
 */
@Queue(mix = true)
public class DefaultLocalMixQueue implements BadFoxQueue {

    private static final Logger logger = LoggerFactory.getLogger(DefaultLocalMixQueue.class);

    /**
     * Per-crawler count of requests currently sitting in {@link #queue}.
     * ConcurrentHashMap: bPop/push/len are expected to run on different threads,
     * and a plain HashMap's check-then-put in getCrawlerCount can lose counters.
     */
    private final Map<String, AtomicLong> countMap = new ConcurrentHashMap<>();

    /** Single shared queue mixing the requests of every crawler ({@code @Queue(mix = true)}). */
    private final LinkedBlockingQueue<Request> queue = new LinkedBlockingQueue<>();

    /** Per-crawler set of request signatures already processed (dedup record). */
    private final Map<String, ConcurrentSkipListSet<String>> processedData = new ConcurrentHashMap<>();

    /**
     * Blocking pop with a timeout.
     *
     * @param crawlerName ignored here — the queue mixes all crawlers, so the next
     *                    request may belong to any crawler
     * @param time        maximum time to wait
     * @param timeUnit    unit of {@code time}
     * @return the next request, or {@code null} on timeout or interruption
     */
    @Override
    public Request bPop(String crawlerName, long time, TimeUnit timeUnit) {
        try {
            Request request = queue.poll(time, timeUnit);
            if (request != null) {
                // Count against the request's own crawler, not the caller's argument.
                decrementCount(request.getCrawlerName());
            }
            return request;
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(), e);
            return null;
        }
    }

    /**
     * Blocking pop without a timeout; waits until a request is available.
     *
     * @param crawlerName ignored here — the queue mixes all crawlers
     * @return the next request, or {@code null} if interrupted while waiting
     */
    @Override
    public Request bPop(String crawlerName) {
        try {
            Request request = queue.take();
            decrementCount(request.getCrawlerName());
            return request;
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Enqueues a request and bumps its crawler's pending counter.
     *
     * @param req the request to enqueue
     * @return {@code true} on success, {@code false} if interrupted while waiting
     */
    @Override
    public boolean push(Request req) {
        try {
            queue.put(req);
            getCrawlerCount(req.getCrawlerName()).incrementAndGet();
            return true;
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(), e);
        }
        return false;
    }

    /**
     * @param crawlerName crawler whose pending count is requested
     * @return number of this crawler's requests currently queued (0 if unknown)
     */
    @Override
    public long len(String crawlerName) {
        AtomicLong counter = countMap.get(crawlerName);
        return counter == null ? 0L : counter.get();
    }

    /**
     * @param req request to test
     * @return {@code true} if a request with the same signature was recorded
     *         via {@link #addProcessed(Request)}
     */
    @Override
    public boolean isProcessed(Request req) {
        ConcurrentSkipListSet<String> set = getProcessedSet(req.getCrawlerName());
        String sign = GenericUtils.signRequest(req);
        return set.contains(sign);
    }

    /**
     * Records a request as processed. Uses the SAME signing function as
     * {@link #isProcessed(Request)} — the original mixed
     * {@code GenericUtils.signRequest} with {@code DigestUtils.md5Hex(url)},
     * so recorded entries could never be found again.
     *
     * @param req request to record
     */
    @Override
    public void addProcessed(Request req) {
        ConcurrentSkipListSet<String> set = getProcessedSet(req.getCrawlerName());
        String sign = GenericUtils.signRequest(req);
        set.add(sign);
    }

    /**
     * @param crawlerName crawler of interest
     * @return number of distinct request signatures recorded for this crawler
     */
    @Override
    public long totalCrawled(String crawlerName) {
        return getProcessedSet(crawlerName).size();
    }

    /**
     * Drops the dedup record for one crawler; its requests may be crawled again.
     *
     * @param crawlerName crawler whose record is cleared
     */
    @Override
    public void clearRecord(String crawlerName) {
        getProcessedSet(crawlerName).clear();
    }

    /**
     * Atomically decrements a crawler's pending counter, never going below zero.
     * The original non-atomic {@code get() > 0} / {@code decrementAndGet()} pair
     * could drive the counter negative under concurrent pops.
     */
    private void decrementCount(String crawlerName) {
        getCrawlerCount(crawlerName).updateAndGet(c -> c > 0 ? c - 1 : 0);
    }

    /** Returns (creating atomically if absent) the pending counter for a crawler. */
    private AtomicLong getCrawlerCount(String crawlerName) {
        return countMap.computeIfAbsent(crawlerName, k -> new AtomicLong(0));
    }

    /** Returns (creating atomically if absent) the processed-signature set for a crawler. */
    public ConcurrentSkipListSet<String> getProcessedSet(String crawlerName) {
        return processedData.computeIfAbsent(crawlerName, k -> new ConcurrentSkipListSet<>());
    }

}
