/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package threads;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

/**
 *
 * @author andrei
 * taken from: http://www.devarticles.com/c/a/Java/Crawling-the-Web-with-Java/8/
 */
public class AllowedRobotsURLList {

    /** Paths listed after "Disallow:" in the host's robots.txt; empty if none could be read. */
    private final List<String> disallowList = new ArrayList<>();

    /**
     * Fetches and parses {@code /robots.txt} from the host of the given URL and
     * records its {@code Disallow:} paths. If the file cannot be fetched or read,
     * the disallow list stays empty and every link is treated as allowed.
     *
     * @param initURLString URL whose host (and port, if non-default) determines
     *        where robots.txt is fetched from
     * @throws MalformedURLException if {@code initURLString} is not a valid URL
     */
    public AllowedRobotsURLList(String initURLString) throws MalformedURLException {

        URL initURL = new URL(initURLString);

        String host = initURL.getHost().toLowerCase(Locale.ROOT);
        // Preserve a non-default port so seeds like example.com:8080 query the right server
        // (the original silently dropped the port and asked port 80 instead).
        int port = initURL.getPort();
        String authority = (port == -1) ? host : host + ":" + port;
        URL robotsFileUrl = new URL("http://" + authority + "/robots.txt");

        // try-with-resources closes the stream; the original leaked the reader and
        // would NPE on reader.readLine() when the connection failed.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                robotsFileUrl.openStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (line.indexOf("Disallow:") == 0) {
                    String disallowPath = line.substring("Disallow:".length());
                    // Check disallow path for comments and remove if present.
                    int commentIndex = disallowPath.indexOf('#');
                    if (commentIndex != -1) {
                        disallowPath = disallowPath.substring(0, commentIndex);
                    }
                    disallowPath = disallowPath.trim();
                    // A bare "Disallow:" means "allow everything" per the robots.txt
                    // convention; adding "" would match every URL via startsWith("")
                    // and wrongly block the whole site.
                    if (!disallowPath.isEmpty()) {
                        disallowList.add(disallowPath);
                    }
                }
            }
        } catch (IOException e) {
            // Best effort, as before: no readable robots.txt means all links allowed.
            System.err.println("Got an error while reading the robots.txt file: " + e.getMessage());
            System.err.println("Most likely website is not protected by robots text file. All links are allowed");
        }
    }

    /**
     * Checks whether crawling the given URL is permitted by the robots.txt rules
     * loaded in the constructor.
     *
     * @param urlToCheck URL to test against the disallow list
     * @return {@code false} if the URL's file part starts with any disallowed
     *         path, {@code true} otherwise (including when no robots.txt was found)
     */
    public boolean isLinkRobotSafe(URL urlToCheck) {
        String file = urlToCheck.getFile();
        for (String disallow : disallowList) {
            if (file.startsWith(disallow)) {
                return false;
            }
        }
        return true;
    }
}
