package multiThreads;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class HtmlCrawl {

    // Host prefix (e.g. "http://news.yahoo.com") that crawled URLs must share.
    // Instance (not static) state: each HtmlCrawl crawls one site at a time.
    private String host;
    // Frontier: URLs discovered but not yet processed.
    private final ConcurrentLinkedQueue<String> midUrls = new ConcurrentLinkedQueue<>();
    // URLs already visited; doubles as the result set. Concurrent because
    // worker tasks add to it while crawl() coordinates.
    private final Set<String> resultUrls = ConcurrentHashMap.newKeySet();
    private Semaphore semaphore;

    /**
     * Stub link extractor with hard-coded sample data: the single known page
     * yields two links; every other page yields no links.
     */
    public class HtmlParser{

        /**
         * @param url page to parse
         * @return outgoing links of the page; empty when the page is unknown
         *         (empty list rather than null so callers need no null check)
         */
        public List<String> getUrls(String url){
            if("http://news.yahoo.com/news".equalsIgnoreCase(url)){
                return Arrays.asList("http://news.yahoo.com/news/123", "http://news.yahoo.com/us");
            }
            return new ArrayList<>();
        }
    }

    /**
     * Task that visits one URL: records it, extracts its links, and pushes
     * them onto the frontier. Always releases exactly one semaphore permit so
     * the coordinating loop in crawl() can make progress.
     */
    public class Crawl implements Runnable {
        HtmlParser htmlParser;
        String url;
        private Semaphore semaphore;

        public Crawl(HtmlParser hp, String u, Semaphore s){
            htmlParser = hp;
            url = u;
            semaphore = s;
        }

        @Override
        public void run() {
            try {
                // add() returns false when the URL was already visited;
                // skipping it prevents re-parsing and infinite loops on cycles.
                if (url.startsWith(host) && resultUrls.add(url)) {
                    List<String> urls = htmlParser.getUrls(url);
                    if(urls != null) {
                        midUrls.addAll(urls);
                    }
                }
            } finally {
                // Release even if parsing throws — otherwise crawl()
                // deadlocks in acquire().
                semaphore.release();
            }
        }
    }


    /**
     * Crawls all pages reachable from startUrl that share its host.
     * Each frontier URL is handed to the pool, and the loop waits on the
     * semaphore for that task to finish before polling the next one.
     *
     * @param startUrl page to start from; assumed to begin with "http://"
     * @param htmlParser link extractor
     * @return all reachable same-host URLs (including startUrl)
     */
    public List<String> crawl(String startUrl, HtmlParser htmlParser) {
        host = getHost(startUrl);
        // Reset state so the instance can be reused across calls.
        midUrls.clear();
        resultUrls.clear();
        midUrls.add(startUrl);
        BlockingQueue<Runnable> queue = new LinkedBlockingQueue<>(1000);
        semaphore = new Semaphore(0);
        ThreadPoolExecutor executor = new ThreadPoolExecutor(100, 1000, 1000, TimeUnit.MILLISECONDS, queue);

        try {
            while(!midUrls.isEmpty()){
                String url = midUrls.poll();
                Crawl c = new Crawl(htmlParser, url, semaphore);
                executor.execute(c);
                try {
                    // Wait for the submitted task before re-examining the frontier.
                    semaphore.acquire();
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop crawling rather than
                    // swallowing the interruption.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        } finally {
            // Always release pool threads, even if the loop fails.
            executor.shutdown();
        }
        return new ArrayList<>(resultUrls);
    }

    /**
     * Extracts "http://host" from a URL.
     * NOTE(review): substring(7) assumes the scheme prefix is exactly
     * "http://"; https URLs are not handled by this class.
     *
     * @param startUrl full URL beginning with "http://"
     * @return scheme + host, or the input unchanged when it has no path
     */
    public String getHost(String startUrl){
        String rest = startUrl.substring(7);
        int idx = rest.indexOf('/');
        if(idx < 0){
            return startUrl;
        } else {
            return "http://" + rest.substring(0, idx);
        }
    }

    /** Demo entry point: crawls the stub site and prints the result set. */
    public static void main(String[] args){
        HtmlCrawl s = new HtmlCrawl();
        HtmlParser hp = s.new HtmlParser();
        List<String> results = s.crawl("http://news.yahoo.com/news", hp);
        System.out.println("######################");
        for(String r: results){
            System.out.println(r);
        }
    }
}
