package Temp.Delete;

import java.util.ArrayList;
import java.util.List;

public class WebCrawler {
        /** Maximum link depth to follow below the seed page. */
        private int maxDepth = 2;
        /** Maximum total number of pages to fetch. */
        private int maxPages = 20;
        /** Pages fetched so far; the seed page counts as the first. */
        private int searchedPages = 1;
        /** Depth of the level currently being crawled (0 = seed page). */
        private int searchedDepth = 0;
        /** Fetches a page and returns the URLs it links to. */
        private final WebPageReader pageReader = new WebPageReader();

        public static void main(String[] args) {
                try {
                        WebCrawler cr = new WebCrawler();
                        cr.readUrl("http://www-cs-students.stanford.edu/~dbyang/");
                } catch (Exception e) {
                        e.printStackTrace();
                }
        }

        /**
         * Starts a breadth-first crawl from the given seed URL.
         * Does nothing when either configured limit is below 1.
         *
         * @param url seed page to start crawling from
         * @throws Exception propagated from the page reader
         */
        private void readUrl(String url) throws Exception {
                if (maxDepth < 1 || maxPages < 1) {
                        return;
                }
                List<String> seed = new ArrayList<String>();
                seed.add(url);
                readPageRecursively(seed);
        }

        /**
         * Crawls one breadth-first level: fetches every URL in {@code urls}
         * up to the page budget, collects the links found on those pages,
         * then recurses exactly once into the collected links as the next
         * level.
         *
         * @param urls the URLs making up the current depth level
         * @throws Exception propagated from the page reader
         */
        private void readPageRecursively(List<String> urls) throws Exception {
                List<String> childUrls = new ArrayList<String>();
                for (String url : urls) {
                        if (searchedPages > maxPages) {
                                break; // page budget exhausted; skip the rest of this level
                        }
                        System.out.println("reading page..."+url+". Searched pages: "+searchedPages+", Searched depth: "+searchedDepth);
                        childUrls.addAll(pageReader.parseWebPage(url));
                        searchedPages++;
                }
                searchedDepth++;
                // BUG FIX: this was a while-loop, which re-crawled the SAME child
                // list repeatedly (fetching duplicate pages) until the counters
                // happened to exceed the limits. A single conditional recursion
                // descends exactly one level per call.
                if (searchedDepth <= maxDepth && searchedPages <= maxPages) {
                        readPageRecursively(childUrls);
                }
        }

}
