package spider.web;


import java.util.HashSet;
import java.util.Set;

/**
 * Shared bookkeeping for the crawler's URL frontier.
 * Invariants:
 *   1. URLs stored in the structure must never be duplicated.
 *   2. Membership lookups must be fast.
 *
 * @author zhangzeli
 * @date 19:02 2018/5/4
 */
public class LinkQueue {
    // Set of URLs already visited. A Set guarantees no duplicates and fast
    // contains() checks. Element type is Object because the public API
    // (addVisitedUrl/removeVisitedUrl) accepts Object and must stay compatible.
    private static final Set<Object> visitedUrl = new HashSet<>();
    // Queue of URLs waiting to be visited (project-local FIFO wrapper).
    private static final UrlQueue unVisitedUrl = new UrlQueue();

    /** Utility class: all members are static, so instantiation is disallowed. */
    private LinkQueue() {
    }

    /** Returns the queue of URLs that have not been visited yet. */
    public static UrlQueue getUnVisitedUrl() {
        return unVisitedUrl;
    }

    /** Records {@code url} as visited. Duplicates are ignored by the Set. */
    public static void addVisitedUrl(Object url) {
        visitedUrl.add(url);
    }

    /** Removes {@code url} from the visited set (e.g. to allow a re-crawl). */
    public static void removeVisitedUrl(Object url) {
        visitedUrl.remove(url);
    }

    /**
     * Dequeues the next unvisited URL.
     *
     * @return whatever {@code UrlQueue.deQueue()} yields — presumably the head
     *         element, or some empty marker when the queue is drained; the
     *         contract lives in UrlQueue, which is not visible here.
     */
    public static Object unVisitedUrlDequeue() {
        return unVisitedUrl.deQueue();
    }

    /**
     * Enqueues {@code url} for crawling unless it is null, empty, already
     * visited, or already pending.
     * NOTE: the method name keeps its original (misspelled) form so existing
     * callers keep compiling; {@code contians} is UrlQueue's actual method name.
     */
    public static void addUnvisItedUrl(String url) {
        if (url != null && !url.isEmpty()
                && !visitedUrl.contains(url)
                && !unVisitedUrl.contians(url)) {
            unVisitedUrl.enQueue(url);
        }
    }

    /** Returns how many URLs have been visited so far. */
    public static int getVisitedUrlNum() {
        return visitedUrl.size();
    }

    /** Returns {@code true} when no URLs remain to be visited. */
    public static boolean unVisitedUrlEmpty() {
        return unVisitedUrl.isQueueEmpty();
    }
}
