package multiThreads;

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Multithreaded web crawler: visits all pages reachable from a start URL
 * that share the start URL's hostname.
 *
 * @author pengfei.hpf
 * @date 2020/3/30
 * @version 1.0.0
 */
public class HTMLCrawlII {

    /** Collected result URLs; concurrent list since worker threads append. */
    List<String> res = new CopyOnWriteArrayList<>();
    /** Work queue of URLs still to be crawled. */
    LinkedBlockingDeque<String> queue = new LinkedBlockingDeque<>();
    /** Set of URLs already claimed by a worker (value unused; map used as a concurrent set). */
    ConcurrentHashMap<String, String> visited = new ConcurrentHashMap<>();
    /** Host prefix (e.g. "http://news.yahoo.com") of the start URL; only same-host URLs are crawled. */
    String sUrl = "";

    /**
     * Crawls every URL reachable from {@code startUrl} that has the same host.
     *
     * <p>Note: each URL is processed by a freshly spawned thread which is
     * immediately joined, so the crawl is effectively sequential; the
     * concurrent collections guard the shared state nonetheless.
     *
     * @param startUrl   the URL to start from (assumed to begin with "http://")
     * @param htmlParser parser used to extract outgoing links from a page
     * @return the list of crawled same-host URLs (empty on null arguments)
     */
    public List<String> crawl(String startUrl, HtmlCrawl.HtmlParser htmlParser) {
        if (startUrl == null || htmlParser == null) {
            return res;
        }
        sUrl = getHost(startUrl);
        queue.add(startUrl);
        while (!queue.isEmpty()) {
            String url = queue.poll();
            Thread t = new Thread(new Consumer(url, htmlParser));
            t.start();
            try {
                t.join();
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe the
                // interruption, then stop crawling instead of swallowing it.
                Thread.currentThread().interrupt();
                break;
            }
        }
        return res;
    }

    /**
     * Extracts the host prefix of a URL, including the "http://" scheme.
     *
     * <p>Assumes the URL starts with "http://" (7 characters are skipped
     * unconditionally); other schemes would be mis-parsed.
     *
     * @param startUrl a URL beginning with "http://"
     * @return "http://" + hostname, or the input itself if it has no path
     */
    public String getHost(String startUrl) {
        String rest = startUrl.substring(7);
        int idx = rest.indexOf("/");
        if (idx < 0) {
            return startUrl;
        } else {
            return "http://" + rest.substring(0, idx);
        }
    }

    /** Worker task: records one URL and enqueues its unvisited same-host links. */
    class Consumer implements Runnable {
        HtmlCrawl.HtmlParser parser;
        String url;

        Consumer(String u, HtmlCrawl.HtmlParser p) {
            parser = p;
            url = u;
        }

        @Override
        public void run() {
            // putIfAbsent makes claim-the-URL atomic; the old containsKey/put
            // pair was a check-then-act race under concurrent workers.
            if (url != null && visited.putIfAbsent(url, url) == null) {
                // Compare full hosts: a bare startsWith(sUrl) would wrongly
                // accept e.g. "http://news.yahoo.com.evil/..." for host
                // "http://news.yahoo.com".
                if (getHost(url).equals(sUrl)) {
                    res.add(url);
                    for (String u : parser.getUrls(url)) {
                        if (getHost(u).equals(sUrl) && !visited.containsKey(u)) {
                            queue.add(u);
                        }
                    }
                }
            }
        }
    }

    /** Manual smoke test; requires the project-local HtmlCrawl stub. */
    public static void main(String[] args) {
        HTMLCrawlII s2 = new HTMLCrawlII();
        HtmlCrawl s = new HtmlCrawl();
        HtmlCrawl.HtmlParser hp = s.new HtmlParser();
        List<String> results = s2.crawl("http://news.yahoo.com/news", hp);
        System.out.println("######################");
        for (String r : results) {
            System.out.println(r);
        }
    }
}
