/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package testrobotstxt;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

/**
 *
 * @author andrei
 */
public class Main {

    /**
     * Entry point. Expects exactly one command line argument: the URL to
     * check against the host's {@code robots.txt} rules.
     *
     * @param args the command line arguments; {@code args[0]} is the URL
     */
    public static void main(String[] args) {
        // Validate the argument count BEFORE reading args[0]; the original
        // printed args[0] first and threw ArrayIndexOutOfBoundsException
        // when run with no arguments.
        if (args.length != 1) {
            System.out.println("Give a URL as an argument !");
            return;
        }
        System.out.println(args[0]);

        URL urlRobot;
        try {
            urlRobot = new URL(args[0]);
        } catch (MalformedURLException e) {
            // Bail out instead of continuing with a null URL (the original
            // fell through and caused a NullPointerException below).
            System.err.println("We've got a malformed URL: " + e.toString());
            return;
        }

        if (isLinkRobotSafe2(urlRobot)) {
            System.out.println("Link is OK with the Robot !");
        } else {
            System.out.println("Ups.. Link is not OK with the Robot !");
        }
    }

    /**
     * Checks whether the given URL may be crawled according to the
     * "Disallow:" entries of the host's {@code robots.txt}.
     *
     * @param url the URL to check
     * @return {@code true} if crawling is allowed (or no robots.txt is
     *         readable), {@code false} if a Disallow rule matches or the
     *         robots.txt URL cannot be formed
     */
    public static boolean isLinkRobotSafe(URL url) {

        String strHost = url.getHost();

        // form URL of the robots.txt file
        String strRobot = "http://" + strHost + "/robots.txt";
        URL urlRobot;

        try {
            urlRobot = new URL(strRobot);
        } catch (MalformedURLException e) {
            // something weird is happening, so don't trust it
            return false;
        }

        // Read the whole robots.txt. A StringBuilder avoids the O(n^2)
        // string concatenation of the original, and the single read loop
        // fixes a crash on an empty file: the original unconditionally did
        // new String(b, 0, numRead) with numRead == -1 at immediate EOF.
        StringBuilder robotsCommands = new StringBuilder();
        try {
            InputStream urlRobotStream = urlRobot.openStream();
            try {
                byte[] b = new byte[1000];
                int numRead;
                while ((numRead = urlRobotStream.read(b)) != -1) {
                    // NOTE(review): platform default charset, as in the
                    // original; robots.txt is normally ASCII so this is fine.
                    robotsCommands.append(new String(b, 0, numRead));
                }
            } finally {
                // Close even when read() throws; the original leaked the
                // stream on a mid-read IOException.
                urlRobotStream.close();
            }
        } catch (IOException e) {
            // if there is no robots.txt file, it is OK to search
            return true;
        }
        String robotsCommandsString = robotsCommands.toString();

        // assume that this robots.txt refers to us and
        // search for "Disallow:" commands.
        String strURL = url.getFile();
        int index = 0;
        while ((index = robotsCommandsString.indexOf("Disallow:", index)) != -1) {
            index += "Disallow:".length();
            String strPath = robotsCommandsString.substring(index);
            StringTokenizer st = new StringTokenizer(strPath);

            if (!st.hasMoreTokens()) {
                break;
            }

            String strBadPath = st.nextToken();

            // if the URL starts with a disallowed path, it is not safe
            if (strURL.startsWith(strBadPath)) {
                return false;
            }
        }

        return true;
    }

    //taken from: http://www.devarticles.com/c/a/Java/Crawling-the-Web-with-Java/8/
    /**
     * Checks whether the given URL may be crawled, by downloading the host's
     * {@code robots.txt} line by line and collecting its "Disallow:" paths.
     *
     * @param urlToCheck the URL to check
     * @return {@code true} if crawling is allowed (including when no
     *         robots.txt is reachable), {@code false} if a Disallow rule
     *         matches the URL's path
     */
    public static boolean isLinkRobotSafe2(URL urlToCheck) {

        String host = urlToCheck.getHost().toLowerCase();

        // create disallow list (typed; the original used a raw ArrayList)
        List<String> disallowList = new ArrayList<String>();

        BufferedReader reader;
        try {
            URL robotsFileUrl =
                    new URL("http://" + host + "/robots.txt");
            // Open connection to robot file URL for reading.
            reader =
                    new BufferedReader(new InputStreamReader(
                    robotsFileUrl.openStream()));
        } catch (IOException e) {
            // No reachable robots.txt: crawling is allowed by convention.
            // (The original left reader == null here and only "worked"
            // because the later catch (Exception) swallowed the resulting
            // NullPointerException.)
            System.err.println("Got an error while reading the robots.txt file: " + e.getMessage());
            return true;
        }

        try {
            String line;
            while ((line = reader.readLine()) != null) {
                if (line.indexOf("Disallow:") == 0) {
                    String disallowPath = line.substring("Disallow:".length());
                    // Check disallow path for comments and remove if present.
                    int commentIndex = disallowPath.indexOf("#");
                    if (commentIndex != -1) {
                        disallowPath = disallowPath.substring(0, commentIndex);
                    }
                    disallowPath = disallowPath.trim();
                    // Skip empty paths: an empty "Disallow:" means everything
                    // is ALLOWED per the robots exclusion convention, but the
                    // original's startsWith("") matched (disallowed) every URL.
                    if (disallowPath.length() > 0) {
                        disallowList.add(disallowPath);
                    }
                }
            }
        } catch (IOException e) {
            /* Assume robot is allowed since an exception
            is thrown if the robot file can't be read. */
            return true;
        } finally {
            try {
                reader.close(); // the original never closed the reader (leak)
            } catch (IOException ignored) {
                // best-effort close; nothing sensible to do here
            }
        }

        /* Loop through disallow list to see if the
        crawling is allowed for the given URL. */
        String file = urlToCheck.getFile();
        for (String disallow : disallowList) {
            if (file.startsWith(disallow)) {
                return false;
            }
        }

        //by default return true
        return true;
    }
}
