package com4280;

import java.io.*;
import java.net.*;
import java.util.*;

/**
 * This is the main class for the spider.
 *
 * @author Duncan Grant 
 * @author Andrew Evans 
 * @author Mitchell Whitehouse
 * @version 1.0
 */
public class MADBot implements myIWSpider {
    private Thread spiderThread;
    private Spider spider;
    private MainScreen mainScreen;
    // URLs the spider may not visit, built from the server's robots.txt file
    private ArrayList<String> disallowed;
    // starting server (seed URL); used to decide internal vs. external links
    private String seed;
    ArrayList<URL> internalLinks;
    ArrayList<URL> externalLinks;
    // the output file destinations
    private final String LOCAL_FILE = "localIWURLs.txt";
    private final String EXTERNAL_FILE = "externalIWURLs.txt";
    private final String OUTPUT_DIRECTORY = "Output" + File.separator + "Spider";
    File dirs = new File(OUTPUT_DIRECTORY);

    /**
     * Main constructor, starts the program.
     * Initialises all collections BEFORE the GUI is shown, so that a user
     * action arriving immediately after the window opens can never observe
     * null lists (the original opened the UI first).
     */
    public MADBot() {
        // make directories if they don't exist
        if (!dirs.exists()) {
            dirs.mkdirs();
        }
        disallowed = new ArrayList<String>();
        internalLinks = new ArrayList<URL>();
        externalLinks = new ArrayList<URL>();
        openUserInterface();
    }

    /**
     * Shows the GUI to the user.
     */
    public void openUserInterface() {
        mainScreen = new MainScreen(this);
    }

    /**
     * Closes the GUI.
     */
    public void closeUserInterface() {
        mainScreen.dispose();
    }

    /**
     * Populates an ArrayList of the disallowed URLs for the spider (as stated in robots.txt file).
     * Starts crawling on the specified server in a background thread.
     *
     * @param mySeed the String of the URL on which the spider will start crawling.
     */
    public void startIWSpider(final String mySeed) {
        disallowed = checkRobotsFile(mySeed);

        seed = mySeed;

        spider = new Spider(this, mySeed);
        spiderThread = new Thread(spider);
        spiderThread.start();
    }

    /**
     * Forwards a line of output text to the GUI.
     *
     * @param output the text to display.
     */
    public void addOutput(String output) {
        mainScreen.addOutput(output);
    }

    /**
     * Tests a URL against the disallowed list built from robots.txt.
     *
     * @param myUrl the URL to be tested if allowed/disallowed.
     * @return true if myUrl is not disallowed by the robots.txt file.
     */
    public boolean isIWRobotSafe(final String myUrl) {
        // A URL is unsafe if it falls under (contains) any disallowed prefix.
        for (String rule : disallowed) {
            if (myUrl.contains(rule)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Takes a base URL and finds the robots.txt file relative to this. Any URI which is disallowed for either all robots
     * or specifically the MADBOT robot will be added to an ArrayList<String> and returned (so that the spider will not
     * access these areas of the server).
     *
     * @param myUrl the base URL (server root). The robots.txt file will be found relative to this URL parameter.
     * @return An ArrayList<String> of every disallowed directory/file as specified in the server's robots.txt file.
     *         There are a number of clauses for the special case in which the robots.txt is located on the dcs testing server
     *         (i.e. not in root directory). Also on the dcs testing server, the robots.txt file is unusual as it states disallowed
     *         directories relative to 2 different base URLs.
     */
    private ArrayList<String> checkRobotsFile(String myUrl) {
        ArrayList<String> notAllowed = new ArrayList<String>(20);
        String robotsURL;
        // Special case where the server is the DCS testing server: i.e. robots.txt is not in root directory.
        if (myUrl.startsWith("http://poplar.dcs.shef.ac.uk/")) {
            robotsURL = "http://poplar.dcs.shef.ac.uk/~u0082/intelweb2/robots.txt";
        } else if (myUrl.endsWith("/")) {
            robotsURL = myUrl + "robots.txt";
        } else {
            robotsURL = myUrl + "/robots.txt";
        }
        URL testURL;
        try {
            testURL = new URL(robotsURL);
        } catch (MalformedURLException e) {
            // BUG FIX: previously execution fell through with a null URL and
            // threw a NullPointerException at openStream(). Treat a malformed
            // seed as "no robots.txt reachable" and allow everything.
            System.err.println("Malformed robots.txt URL: " + robotsURL);
            return notAllowed;
        }
        // try-with-resources guarantees the reader is closed (it leaked before).
        try (BufferedReader robotsFile = new BufferedReader(new InputStreamReader(testURL.openStream()))) {
            String inputLine;
            String currentUserAgent = "";
            while ((inputLine = robotsFile.readLine()) != null) {
                // find where robots.txt file is referring to a user-agent.
                // substring(indexOf(':')+1) instead of split(":")[1]: split threw
                // ArrayIndexOutOfBoundsException on an empty value and truncated
                // values that themselves contain a colon.
                if (inputLine.toLowerCase().startsWith("user-agent:")) {
                    currentUserAgent = inputLine.toLowerCase().substring("user-agent:".length()).trim();
                }
                // if current user-agent refers to us.
                // BUG FIX: currentUserAgent is lower-cased above, so the old
                // comparison against upper-case "MADBOT" could never match and
                // bot-specific rules were silently ignored.
                else if (currentUserAgent.startsWith("madbot") || currentUserAgent.equals("*")) {
                    if (inputLine.toLowerCase().startsWith("disallow:")) {
                        // relativeURL = URL found in robots.txt
                        String relativeURL = inputLine.substring(inputLine.indexOf(':') + 1).trim();
                        // An empty "Disallow:" line legally means "allow everything" — skip it
                        // (the old split(":")[1] crashed on it).
                        if (relativeURL.isEmpty()) {
                            continue;
                        }
                        // seedURL = starting base URL, normalised without a trailing slash
                        String seedURL;
                        if (myUrl.endsWith("/"))
                            seedURL = myUrl.substring(0, myUrl.length() - 1);
                        else seedURL = myUrl;
                        if (myUrl.startsWith("http://poplar.dcs.shef.ac.uk"))
                            seedURL = "http://poplar.dcs.shef.ac.uk/~u0082/intelweb2";
                        if (!relativeURL.startsWith("/"))
                            relativeURL = "/".concat(relativeURL);
                        if (relativeURL.endsWith("/"))
                            relativeURL = relativeURL.substring(0, relativeURL.length() - 1);
                        // Accounts for the inconsistency in relative URLs on the testing server!
                        if (relativeURL.startsWith("/~u0082/intelweb2"))
                            relativeURL = relativeURL.substring("/~u0082/intelweb2".length());
                        // outputURL = combined seed+relative URL to add to the list
                        String outputURL = seedURL.concat(relativeURL).trim();
                        notAllowed.add(outputURL);
                    }
                }
            }
        } catch (IOException e) {
            // Most servers simply have no robots.txt — log and allow everything.
            System.err.println(e);
        }
        notAllowed.trimToSize();
        return notAllowed;
    }

    /**
     * Writes out all of the found internal links to a local links file.
     * Falls back to the current directory if the output directory is missing.
     */
    public void writeLocalFile() {
        writeLinksFile(internalLinks, LOCAL_FILE, "local");
    }

    /**
     * Writes out all of the external links to an external links file.
     * Falls back to the current directory if the output directory is missing.
     */
    public void writeExternalFile() {
        writeLinksFile(externalLinks, EXTERNAL_FILE, "external");
    }

    /**
     * Shared writer for both link files (the two originals were copy-paste
     * duplicates, and the external variant silently swallowed IOExceptions).
     *
     * @param links    the URLs to write, one per line.
     * @param fileName the output file name.
     * @param label    "local" or "external", used in the fallback message.
     */
    private void writeLinksFile(List<URL> links, String fileName, String label) {
        File target;
        if (!dirs.exists()) {
            target = new File(fileName);
            System.out.println("Could not create directory, outputting " + label + " URLs in current directory.");
        } else {
            target = new File(OUTPUT_DIRECTORY, fileName);
        }
        // try-with-resources closes the writer even on failure (it leaked before).
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(target))) {
            for (URL link : links) {
                writer.write(link.toString());
                writer.newLine();
            }
        } catch (IOException ioe) {
            // BUG FIX: the external-file variant used to swallow this silently.
            System.err.println(ioe);
        }
    }

    /**
     * Temporarily stops the spider (can later be resumed).
     */
    public void stopIWSpider() {
        spider.paused = true;
    }

    /**
     * Resumes the spider from the position it was at when it was last stopped.
     */
    public void resumeIWSpider() {
        synchronized (spider.eventObject) {
            spider.paused = false;
            spider.eventObject.notifyAll();
        }
    }

    /**
     * Forces the spider to stop.
     */
    public void killIWSpider() {
        spider.cancel();
    }

    /**
     * @return a String array containing all of the local URLs found while spidering.
     */
    public String[] getLocalIWUrls() {
        return urlsToStrings(internalLinks);
    }

    /**
     * @return a String array containing all of the external URLs found while spidering.
     */
    public String[] getExternalIWURLs() {
        return urlsToStrings(externalLinks);
    }

    /** Converts a list of URLs to an array of their string forms. */
    private static String[] urlsToStrings(List<URL> urls) {
        String[] links = new String[urls.size()];
        for (int i = 0; i < urls.size(); i++)
            links[i] = urls.get(i).toString();
        return links;
    }

    /**
     * Records a URL found by the spider in the appropriate links list.
     *
     * @param url the URL to test.
     * @return true if and only if the URL is within the server space and the URL is not disallowed by the server's
     * robots.txt file.
     */
    public boolean spiderFoundURL(URL url) {
        if (!isIWRobotSafe(url.toString())) {
            return false;
        }
        // Evaluate once (the original called isWithinServer up to three times).
        boolean internal = isWithinServer(url);
        if (internal) {
            if (!internalLinks.contains(url))
                internalLinks.add(url);
        } else if (!externalLinks.contains(url)) {
            externalLinks.add(url);
        }
        return internal;
    }

    /**
     * Tests whether a URL belongs to the server being crawled.
     *
     * @param url the URL to test
     * @return true if the URL is within the server space.
     */
    public boolean isWithinServer(URL url) {
        String text = url.toString();
        return (text.startsWith("http://") || text.startsWith("https://")) && text.contains(seed);
    }

    public static void main(String[] args) {
        new MADBot();
    }
}