package corpustools;

import java.util.ArrayDeque;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import strings.AhoCorasickTST;
import strings.SetMatch;
import swutils.In;

/**
 * Test program for Utf-8 validator. Download and validate some web
 * pages. We assume (falsely, of course) that all pages on the web are
 * in Utf-8.
 */
public class WebCrawler {

    /**
     * Pattern for links of the form http://xxx.yyy.zzz:
     * \w+ for one or more alpha-numeric characters, \. for dot.
     * Compiled once and cached (compiling a Pattern is expensive).
     */
    private static final Pattern LINK_PATTERN =
        Pattern.compile("http://(\\w+\\.)*(\\w+)");

    /** Number of context characters shown around a validation error. */
    private static final int RADIUS = 20;

    /** Utility class; no instances. */
    private WebCrawler() { }

    /**
     * Breadth-first crawl of the web starting from {@code args[0]}
     * (or, if no argument is given, a few hard-coded Amazon book
     * pages). Each fetched page is validated as UTF-8 — except
     * Wikipedia pages, which are assumed well-formed — and every
     * http:// link found on it is enqueued for later examination.
     * Runs until the queue is exhausted (in practice: until killed).
     *
     * @param args args[0] is the starting URL (optional).
     */
    public static void main(String[] args) {

        SetMatch match = new AhoCorasickTST();
        ValidUtf8 validator = new ValidUtf8(match);

        // Naively assumed character blocks that the whole world uses.
        Character.UnicodeBlock[] blocks = {
            Character.UnicodeBlock.BASIC_LATIN,
            Character.UnicodeBlock.LATIN_1_SUPPLEMENT,
            Character.UnicodeBlock.GENERAL_PUNCTUATION,
        };

        // Time out connections after 500 milliseconds, reads after
        // 1000 milliseconds. Could also be set on the command line:
        //   java -Dsun.net.client.defaultConnectTimeout=250
        System.setProperty("sun.net.client.defaultConnectTimeout", "500");
        System.setProperty("sun.net.client.defaultReadTimeout",    "1000");

        // FIFO of web pages still to be examined. ArrayDeque is the
        // preferred Queue implementation (faster than LinkedList);
        // we never enqueue null, so its null-hostility is harmless.
        Queue<String> queue = new ArrayDeque<String>();
        if (args.length != 0) {
            queue.add(args[0]);
        } else {
            // Some arbitrary books as starting points.
            String[] amazonBooks =
            {
                "http://www.amazon.com/BUGS-Writing-Revised-Guide-Debugging"
                + "/dp/020137921X/ref=sr_1_1?ie=UTF8&s=books&qid="
                + "1282381826&sr=8-1",
                "http://www.amazon.de/Penguin-Dictionary-English-Grammar-"
                + "Reference/dp/0140514643/ref=sr_1_4?ie=UTF8&s=books-intl-"
                + "de&qid=1282383052&sr=8-4",
                "http://www.amazon.com/Eats-Shoots-Leaves-Tolerance-"
                + "Punctuation/dp/1592402038/ref=sr_1_1?ie=UTF8&s=books"
                + "&qid=1282383112&sr=8-1",
            };
            for (String b : amazonBooks) {
                queue.add(b);
            }
        }

        // Existence symbol table of already-discovered pages, so every
        // URL is enqueued (and therefore fetched) at most once.
        Set<String> visited = new TreeSet<String>();

        // Breadth-first search crawl of the web.
        while (!queue.isEmpty()) {
            String url = queue.remove();
            System.out.println(url);

            In in = new In(url);

            // Only needed in case the website does not respond.
            if (!in.exists()) { continue; }

            String input = in.readAll();

            if (input == null) { continue; }

            System.out.println("\n");

            // Wikipedia pages are taken on faith to be valid UTF-8.
            // String.contains is equivalent to matching ".*wikipedia.*"
            // but avoids recompiling that regex on every iteration.
            if (!url.contains("wikipedia")) {
                validator.validateFile(url, RADIUS, blocks);
            }

            // Find all links on this page and enqueue the unseen ones.
            Matcher matcher = LINK_PATTERN.matcher(input);
            while (matcher.find()) {
                String link = matcher.group();
                // Set.add returns false for duplicates — one lookup
                // replaces the contains-then-add pair.
                if (visited.add(link)) {
                    queue.add(link);
                }
            }

        }
    }
}
