package com.barkerton.crawler;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.barkerton.crawler.parser.Parser;
import com.barkerton.crawler.queuing.CrawlerQueue;
import com.barkerton.crawler.queuing.PageQueue;
import com.barkerton.crawler.util.CrawlerUrl;
import com.barkerton.crawler.util.PropertyManager;

/**
 * Crawls the web utilizing HTTP to grab online content.
 * 
 * @author c.barker
 *
 */
public class Crawler implements Runnable {

	// Loggers are stateless and shared: one static final instance per class.
	private static final Log log = LogFactory.getLog(Crawler.class);

	/** Default sleep between HTTP requests (ms) when crawl.throttle.ms is unset/invalid. */
	private static final long REQUEST_THROTTLE = 2500;

	private final PropertyManager props;
	private final CrawlerQueue cQueue;		// Seeds to be processed by the crawler
	private final PageQueue pQueue;			// Pages to be processed later by consumers
	private volatile int urlsCrawled;		// written by the crawl thread, read by monitors
	private volatile boolean paused;		// read by isPaused() from other threads
	private volatile long msThrottle;		// time to sleep between HTTP calls; settable at runtime

	/**
	 * Creates a crawler wired to the shared crawler/page queues. The request
	 * throttle is read from the {@code crawl.throttle.ms} property, falling
	 * back to {@link #REQUEST_THROTTLE} when missing or not a valid long.
	 */
	public Crawler() {
		this.props = PropertyManager.getInstance();
		this.cQueue = CrawlerQueue.getInstance();
		this.pQueue = PageQueue.getInstance();
		this.urlsCrawled = 0;
		this.paused = false;

		long throttle;
		try {
			throttle = Long.parseLong(props.getValue("crawl.throttle.ms", Long.toString(REQUEST_THROTTLE)));
		} catch (NumberFormatException nfe) {
			// A malformed property should not abort construction.
			log.warn("Invalid crawl.throttle.ms value; using default " + REQUEST_THROTTLE, nfe);
			throttle = REQUEST_THROTTLE;
		}
		this.msThrottle = throttle;
	}

	/** @return number of pages successfully fetched and queued so far */
	public int getUrlsCrawled() {
		return urlsCrawled;
	}

	/**
	 * Pauses or resumes the crawl loop. Unpausing notifies any thread blocked
	 * in {@link #processQueue()}.
	 */
	public synchronized void setPaused(boolean pause) {
		this.paused = pause;

		if (!pause) {
			log.info("Crawler no longer paused. Notify waiting thread.");
			notifyAll();
		}
	}

	/** @return whether the crawler is currently paused */
	public boolean isPaused() {
		return this.paused;
	}

	/** Sets the sleep (ms) between HTTP requests. Takes effect on the next fetch. */
	public void setMsThrottle(long throttle) {
		this.msThrottle = throttle;
	}

	/**
	 * Main crawl loop: dequeue a seed, fetch its page, queue the page for
	 * consumers, and enqueue extracted links as new seeds. Runs until the
	 * thread is interrupted (treated as a shutdown request) or a seed's
	 * crawl depth is reached.
	 */
	@Override
	public void run() {

		while (true) {
			processQueue();

			// processQueue() restores the interrupt flag instead of swallowing
			// it; honor it here as a shutdown request.
			if (Thread.currentThread().isInterrupted()) {
				log.info("Crawler thread interrupted; exiting crawl loop.");
				return;
			}

			Seed seed = null;
			try {
				seed = cQueue.dequeue();
			} catch (InterruptedException ie) {
				log.warn("Interrupted while dequeuing seed; exiting crawl loop.");
				Thread.currentThread().interrupt();	// preserve interrupt status
				return;
			}

			if (seed == null)
				continue;

			// throttle requests so remote servers are not hammered
			try {
				Thread.sleep(this.msThrottle);
			} catch (InterruptedException ie) {
				Thread.currentThread().interrupt();	// preserve interrupt status
				return;
			}

			log.info("URL Dequeued for processing: " + seed.getUrl());

			Page p = new Page(seed.getUrl());
			p.setContent(getContent(seed.getUrl()));
			if (p.getContent() != null) {
				try {
					pQueue.enqueue(p);
					urlsCrawled++;

					// ">=" rather than "==": urlsCrawled is cumulative across
					// seeds, so an exact-match test can be skipped entirely once
					// the counter has passed this seed's depth. Depth <= 0 keeps
					// the original "never stops" behavior.
					if (seed.getCrawlDepth() > 0 && urlsCrawled >= seed.getCrawlDepth()) {
						log.debug("Exiting crawl - Reached URL's crawl depth: " + urlsCrawled + " for " + seed.getUrl());
						break;
					}

					log.debug("Links extracted from URL");
					for (Object link : Parser.extractLinks(p.getContent())) {

						// NOTE(review): this compares the normalized link against the
						// normalized seed URL itself; for a "site only" crawl a
						// host/domain comparison may have been intended — confirm.
						if (seed.isSiteOnly() && !CrawlerUrl.normalize(link.toString()).equalsIgnoreCase(CrawlerUrl.normalize(seed.getUrl().toString()))) {
							log.debug("Exiting crawl - Extracted links no longer part of site's");
							break;
						}

						cQueue.enqueue(new Seed((URL) link));
						log.debug("  " + (URL) link);
					}
				} catch (InterruptedException ie) {
					log.warn("Interrupted while enqueuing; exiting crawl loop.");
					Thread.currentThread().interrupt();	// preserve interrupt status
					return;
				}
			}

			log.info("Current crawler queue size: " + cQueue.size());
		}
	}

	/**
	 * Blocks while the crawler is paused; returns once unpaused, or when the
	 * waiting thread is interrupted (interrupt status restored so the caller
	 * can observe the shutdown request).
	 */
	private synchronized void processQueue() {
		while (paused) {
			log.info("Crawler is paused will wait.");

			try {
				wait();
			} catch (InterruptedException ie) {
				Thread.currentThread().interrupt();	// let run() see the interrupt
				return;
			}
		}
	}

	/**
	 * Downloads the HTML source of the given URL over HTTP.
	 *
	 * @param url page to fetch
	 * @return the page source, or {@code null} on a non-200 response,
	 *         a non-HTML content type, or I/O failure
	 */
	private String getContent(URL url) {
		log.info("HTTP content download for URL " + url);

		HttpURLConnection conn = null;
		try {
			conn = (HttpURLConnection) url.openConnection();
			conn.setRequestMethod("GET");
			conn.connect();

			if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
				log.error("Non-successful HTTP response: " + conn.getResponseCode());
				return null;
			}

			// Content-Type may be null or carry parameters ("text/html; charset=...");
			// a prefix match accepts both forms without NPE.
			String contentType = conn.getContentType();
			if (contentType == null || !contentType.toLowerCase().startsWith("text/html")) {
				return null;
			}

			// try-with-resources guarantees the stream is closed even on error.
			// Decode as UTF-8 instead of the platform default charset.
			// TODO(review): honor the charset parameter from the Content-Type header.
			try (BufferedReader in = new BufferedReader(
					new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {

				log.debug("HTML Source...");

				String inputLine;
				StringBuilder buff = new StringBuilder();
				while ((inputLine = in.readLine()) != null) {
					// re-add the terminator readLine() strips, so tokens that
					// span lines are not fused together
					buff.append(inputLine).append('\n');
				}
				return buff.toString();
			}
		} catch (IOException ioe) {
			log.error("Failed to download " + url, ioe);
			return null;
		} finally {
			if (conn != null) {
				conn.disconnect();	// release the connection on every exit path
			}
		}
	}

	/*
	 * Simple sanity "smoke" test
	 */
	public static void main(String[] args) throws Exception {
		URL url = new URL("http://www.utexas.edu");
		Crawler crawler = new Crawler();
		crawler.setPaused(false);
		String source = crawler.getContent(url);
		//System.out.println(source);
		Parser.extractTitle(source);
		Parser.extractLinks(source);
	}
}
