/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.alag.ci.webscrawler;
import java.io.*;
import java.net.URL;
import java.util.*;
import java.util.regex.*;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.*;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.params.HttpClientParams;
import org.apache.http.impl.client.DefaultHttpClient;

/**
 *
 * @author sczbbx
 */
/**
 * A naive, single-threaded breadth-first web crawler.
 *
 * <p>Starting from a seed queue of {@link CrawlerUrl}s it repeatedly: takes the
 * next permitted, unvisited, depth-acceptable URL; fetches its content over HTTP;
 * and, if the content matches the configured search pattern, records the URL and
 * enqueues every outgoing link (absolute {@code http://} links and host-relative
 * links). Robots exclusion ({@code /robots.txt}, {@code User-agent: *} section
 * only) is honored per host. Progress is appended to {@code crawl.txt} (matched
 * URLs) and {@code crawlStaticstics.txt} (per-iteration CSV statistics).
 *
 * <p>Not thread-safe: all state is confined to the calling thread.
 */
public class NaiveCrawler {

    private static final String USER_AGENT = "User-agent:";
    private static final String DISALLOW = "Disallow:";
    /** Matches anchor tags whose href is an absolute http:// URL. */
    public static final String REGEXP_HTTP = "<a href=\"http://(.)*\">";
    /** Matches any anchor tag; used to pick up host-relative hrefs. */
    public static final String REGEXP_RELATIVE = "<a href=\"(.)*\">";

    /** Hard cap on the number of URLs fetched before the crawl stops. */
    private final int maxNumberUrls;
    /** Politeness delay (milliseconds) between successive fetches. */
    private final long delayBetweenUrls;
    /** Links deeper than this (measured from the seeds) are skipped. */
    private final int maxDepth;
    /** Content matching this pattern (against lower-cased text) is "relevant". */
    private final Pattern regexpSearchPattern;
    private final Pattern httpRegexp;
    private final Pattern relativeRegexp;
    /** URL string -> CrawlerUrl for every URL already fetched (or attempted). */
    private final Map<String, CrawlerUrl> visitedUrls;
    /** Host -> disallowed path fragments parsed from that host's robots.txt. */
    private final Map<String, Collection<String>> sitePermissions;
    /** Work queue of URLs still to be considered. */
    private final Queue<CrawlerUrl> urlQueue;
    private final BufferedWriter crawlOutput;
    private final BufferedWriter crawlStatistics;
    private int numberItemsSaved = 0;

    /**
     * Creates a crawler.
     *
     * @param urlQueue            seed queue; the crawler drains and refills it
     * @param maxNumberUrls       stop after this many URLs have been visited
     * @param maxDepth            maximum link depth from the seeds (inclusive)
     * @param delayBetweenUrls    sleep in ms between fetches (politeness)
     * @param regexpSearchPattern regex deciding whether fetched content is relevant
     * @throws Exception if the output files cannot be opened or the regex is invalid
     */
    public NaiveCrawler(Queue<CrawlerUrl> urlQueue, int maxNumberUrls,
            int maxDepth, long delayBetweenUrls, String regexpSearchPattern)
            throws Exception {
        this.urlQueue = urlQueue;
        this.maxNumberUrls = maxNumberUrls;
        this.delayBetweenUrls = delayBetweenUrls;
        this.maxDepth = maxDepth;
        this.regexpSearchPattern = Pattern.compile(regexpSearchPattern);
        this.visitedUrls = new HashMap<String, CrawlerUrl>();
        this.sitePermissions = new HashMap<String, Collection<String>>();
        this.httpRegexp = Pattern.compile(REGEXP_HTTP);
        this.relativeRegexp = Pattern.compile(REGEXP_RELATIVE);
        this.crawlOutput = new BufferedWriter(new FileWriter("crawl.txt"));
        // NOTE: file name keeps the historical (misspelled) "Staticstics" so
        // downstream consumers of the output file keep working.
        this.crawlStatistics = new BufferedWriter(new FileWriter("crawlStaticstics.txt"));
    }

    /**
     * Runs the crawl loop until the queue empties or the URL budget is spent.
     * Output streams are closed even if the loop throws.
     *
     * @throws Exception on I/O failure or if the sleep is interrupted
     */
    public void crawl() throws Exception {
        try {
            while (continueCrawling()) {
                CrawlerUrl url = getNextUrl();
                if (url != null) {
                    printCrawlInfo();
                    String content = getContent(url);
                    if (isContentRelevant(content, regexpSearchPattern)) {
                        saveContent(url, content);
                        Collection<String> urlStrings = extractUrls(content, url);
                        addUrlsToUrlQueue(url, urlStrings);
                    } else {
                        // BUG FIX: this message used to hang off the url == null
                        // branch, printing "null is not relevant ..." and never
                        // firing for actually-irrelevant content.
                        System.out.println(url + " is not relevant ignoring ...");
                    }
                }
                Thread.sleep(this.delayBetweenUrls);
            }
        } finally {
            closeOutputStream();
        }
    }

    /** True while there is work queued and the visit budget is not exhausted. */
    private boolean continueCrawling() {
        return !urlQueue.isEmpty() && getNumberOfUrlsVisited() < this.maxNumberUrls;
    }

    /**
     * Pops URLs off the queue until one is permitted (robots.txt), unvisited,
     * and within the depth limit; returns null if the queue runs dry first.
     */
    private CrawlerUrl getNextUrl() {
        while (!urlQueue.isEmpty()) {
            CrawlerUrl candidate = this.urlQueue.remove();
            if (doWeHavePermissionToVisit(candidate)
                    && !isUrlAlreadyVisited(candidate)
                    && isDepthAcceptable(candidate)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Logs progress to stdout and appends one CSV row
     * (visited,saved,queueLength,hostsSeen) to the statistics file.
     */
    private void printCrawlInfo() throws Exception {
        StringBuilder sb = new StringBuilder();
        sb.append("Queue length = ").append(this.urlQueue.size())
                .append(" visited urls = ").append(getNumberOfUrlsVisited())
                // BUG FIX: the visited count was missing from the original message.
                .append(" site permission = ").append(this.sitePermissions.size());
        crawlStatistics.append(String.valueOf(getNumberOfUrlsVisited()))
                .append(",").append(String.valueOf(numberItemsSaved))
                .append(",").append(String.valueOf(this.urlQueue.size()))
                .append(",").append(String.valueOf(this.sitePermissions.size()))
                .append("\n");
        crawlStatistics.flush();
        System.out.println(sb.toString());
    }

    private int getNumberOfUrlsVisited() {
        return this.visitedUrls.size();
    }

    /** Flushes and closes both output writers. */
    private void closeOutputStream() throws Exception {
        crawlOutput.flush();
        crawlOutput.close();
        crawlStatistics.flush();
        crawlStatistics.close();
    }

    private boolean isDepthAcceptable(CrawlerUrl crawlerUrl) {
        return crawlerUrl.getDepth() <= this.maxDepth;
    }

    private boolean isUrlAlreadyVisited(CrawlerUrl crawlerUrl) {
        return crawlerUrl.isVisited()
                || this.visitedUrls.containsKey(crawlerUrl.getUrlString());
    }

    /**
     * Returns whether robots.txt lets us fetch this URL, computing and caching
     * the verdict on the CrawlerUrl the first time it is asked.
     */
    private boolean doWeHavePermissionToVisit(CrawlerUrl crawlUrl) {
        if (crawlUrl == null) {
            return false;
        }
        if (!crawlUrl.isCheckedForpermission()) {
            crawlUrl.setAllowedToVisit(computePermissionForVisiting(crawlUrl));
        }
        return crawlUrl.isAllowedToVisit();
    }

    /**
     * Checks the URL's path against the host's disallowed paths, fetching and
     * parsing the host's robots.txt on first encounter. Unparseable URLs
     * (null java.net.URL) are treated as not permitted.
     */
    private boolean computePermissionForVisiting(CrawlerUrl crawlUrl) {
        URL url = crawlUrl.getURL();
        if (url == null) {
            return false;
        }
        String host = url.getHost();
        Collection<String> disallowedPaths = this.sitePermissions.get(host);
        if (disallowedPaths == null) {
            disallowedPaths = parseRobotsTxtFileToGetDisallowedPaths(host);
        }
        String path = url.getPath();
        for (String disallowedPath : disallowedPaths) {
            if (path.contains(disallowedPath)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Fetches http://host/robots.txt, extracts the "User-agent: *" section,
     * and caches its Disallow path fragments for the host. A missing or
     * unreadable robots.txt yields an empty (allow-everything) list.
     */
    private Collection<String> parseRobotsTxtFileToGetDisallowedPaths(String host) {
        // BUG FIX: the robots exclusion file is "robots.txt", not "robot.txt";
        // the original never fetched any real robots file.
        String robotsTxtContent = getContent("http://" + host + "/robots.txt");
        Collection<String> disallowPaths = new ArrayList<String>();
        if (robotsTxtContent != null) {
            Pattern p = Pattern.compile(USER_AGENT);
            String[] permissionSets = p.split(robotsTxtContent);
            String permissionString = "";
            for (String permission : permissionSets) {
                // Only honor the wildcard agent section ("User-agent: *").
                if (permission.trim().startsWith("*")) {
                    permissionString = permission.substring(1);
                }
            }
            p = Pattern.compile(DISALLOW);
            String[] items = p.split(permissionString);
            for (String s : items) {
                String trimmed = s.trim();
                // BUG FIX: split() always yields a leading chunk before the first
                // "Disallow:"; adding its empty trim made path.contains("") true
                // for every path, disallowing the entire host.
                if (!trimmed.isEmpty()) {
                    disallowPaths.add(trimmed);
                }
            }
        }
        this.sitePermissions.put(host, disallowPaths);
        return disallowPaths;
    }

    /** Fetches a raw URL string at depth 0 (used for robots.txt). */
    private String getContent(String string) {
        return getContent(new CrawlerUrl(string, 0));
    }

    /**
     * HTTP-GETs the URL and returns the response body on 200 OK, or null on
     * any other status or failure. The URL is marked visited either way, so a
     * failing URL is not retried forever.
     */
    private String getContent(CrawlerUrl crawlerUrl) {
        DefaultHttpClient client = new DefaultHttpClient();
        String text = null;
        try {
            HttpGet request = new HttpGet(crawlerUrl.getUrlString());
            HttpResponse response = client.execute(request);
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == HttpStatus.SC_OK) {
                Reader reader = new InputStreamReader(response.getEntity().getContent());
                try {
                    text = readContentsFromStream(reader);
                } finally {
                    // BUG FIX: the entity stream was never closed.
                    reader.close();
                }
            }
        } catch (Exception e) {
            // Best-effort crawl: log and move on (was catch (Throwable), which
            // also swallowed Errors such as OutOfMemoryError).
            System.out.println(e.toString());
            e.printStackTrace();
        } finally {
            // BUG FIX: release the per-request client's connections; the
            // original leaked one connection manager per fetch.
            client.getConnectionManager().shutdown();
        }
        markUrlAsVisited(crawlerUrl);
        return text;
    }

    /** Reads the reader to exhaustion and returns its contents as a String. */
    private String readContentsFromStream(Reader input) throws IOException {
        BufferedReader bufferedReader = (input instanceof BufferedReader)
                ? (BufferedReader) input
                : new BufferedReader(input);
        StringBuilder sb = new StringBuilder();
        char[] buffer = new char[4 * 1024];
        int charsRead;
        while ((charsRead = bufferedReader.read(buffer)) != -1) {
            sb.append(buffer, 0, charsRead);
        }
        return sb.toString();
    }

    /** Records the URL in the visited map and flags the CrawlerUrl itself. */
    private void markUrlAsVisited(CrawlerUrl crawlerUrl) {
        this.visitedUrls.put(crawlerUrl.getUrlString(), crawlerUrl);
        crawlerUrl.setIsVisited();
    }

    /** Returns the de-duplicated set of absolute and resolved-relative links in the text. */
    private Collection<String> extractUrls(String text, CrawlerUrl url) {
        Map<String, String> urlMap = new HashMap<String, String>();
        extractHttpUrls(urlMap, text);
        extractRelativeUrls(urlMap, text, url);
        return new ArrayList<String>(urlMap.keySet());
    }

    /** Adds every absolute http:// href found in the text to urlMap (key == value). */
    private void extractHttpUrls(Map<String, String> urlMap, String text) {
        Matcher m = httpRegexp.matcher(text);
        while (m.find()) {
            String[] terms = m.group().split("a href=\"");
            for (String term : terms) {
                if (term.startsWith("http")) {
                    int closingQuote = term.indexOf("\"");
                    if (closingQuote > 0) {
                        term = term.substring(0, closingQuote);
                    }
                    urlMap.put(term, term);
                }
            }
        }
    }

    /**
     * Adds every host-relative href (starting with '/') to urlMap, resolved
     * against the page's host as http://host/path.
     */
    private void extractRelativeUrls(Map<String, String> urlMap, String text,
            CrawlerUrl crawlerUrl) {
        Matcher m = relativeRegexp.matcher(text);
        String host = crawlerUrl.getURL().getHost();
        while (m.find()) {
            String[] terms = m.group().split("a href=\"");
            for (String term : terms) {
                if (term.startsWith("/")) {
                    int closingQuote = term.indexOf("\"");
                    if (closingQuote > 0) {
                        term = term.substring(0, closingQuote);
                    }
                    String resolved = "http://" + host + term;
                    urlMap.put(resolved, resolved);
                }
            }
        }
    }

    /** Enqueues each not-yet-visited URL one level deeper than its parent. */
    private void addUrlsToUrlQueue(CrawlerUrl url, Collection<String> urlStrings) {
        int depth = url.getDepth() + 1;
        for (String urlString : urlStrings) {
            if (!this.visitedUrls.containsKey(urlString)) {
                this.urlQueue.add(new CrawlerUrl(urlString, depth));
            }
        }
    }

    /**
     * Returns true if the (lower-cased) content contains a match for the
     * pattern; null content is never relevant.
     *
     * @param content       page text, may be null (failed fetch)
     * @param regexpPattern relevance pattern; should use lower-case literals
     */
    public static boolean isContentRelevant(String content, Pattern regexpPattern) {
        boolean retValue = false;
        if (content != null) {
            Matcher m = regexpPattern.matcher(content.toLowerCase());
            retValue = m.find();
        }
        return retValue;
    }

    /**
     * Records a relevant URL in crawl.txt and bumps the saved counter.
     * NOTE(review): despite the name and the content parameter, only the URL is
     * persisted — presumably intentional (an index, not an archive); confirm
     * before "fixing".
     */
    private void saveContent(CrawlerUrl url, String content) throws Exception {
        this.crawlOutput.append(url.getUrlString()).append("\n");
        this.crawlOutput.flush();
        numberItemsSaved++;
    }

}
