package org.correct.crawler;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Calendar;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.correct.config.Config;
import org.correct.util.Queue;

/**
 * To start <code>CrawlManager2</code>, the following steps should be
 * done before the <code>start()</code> is called: <br>
 * 1. Add at least one seed URL. <br>
 * 2. Assign an analyzeManager to this class. <br>
 * <br>
 * This class differs from <code>CrawlManager</code> in that it does
 * not create a new thread for each web page to fetch. Instead, a
 * constant set of threads is used during the whole lifecycle of the
 * program.
 * 
 * @author Li Yujia
 *
 */
public class CrawlManager2 {
	
	/**
	 * Worker thread: repeatedly takes a URL from the shared queue and
	 * downloads it, blocking on the manager's monitor while the queue
	 * is empty.
	 */
	private class Fetcher extends Thread {
		@Override
		public void run() {
			while(true) {
				String url;
				synchronized(CrawlManager2.this) {
					// Poll and wait under the same monitor that addUrl()
					// notifies on, so a notify() arriving between a failed
					// poll and the wait cannot be lost.
					while((url = urlQueue.poll()) == null) {
						try {
							CrawlManager2.this.wait();
						} catch (InterruptedException e) {
							// Restore interrupt status and let this worker exit.
							Thread.currentThread().interrupt();
							return;
						}
					}
				}
				// Fetch outside the monitor so other workers can proceed.
				fetch(url);
			}
		}
		
		/**
		 * Downloads the page at {@code urlStr} and hands the HTML to the
		 * analyze manager. A 3xx response is not followed directly; its
		 * Location target is re-queued instead. Non-HTTP(S) URLs are
		 * silently skipped.
		 */
		private void fetch(String urlStr) {
			HttpURLConnection httpConn = null;
			try {
				// Default to http:// when no scheme was supplied.
				URL url;
				if(!(urlStr.startsWith("http://") || urlStr.startsWith("https://"))) {
					url = new URL("http://" + urlStr);
				} else {
					url = new URL(urlStr);
				}
				
				System.out.println("f> " + this + " Fetching " + url + " ...");
				
				URLConnection conn = url.openConnection();
				if(!(conn instanceof HttpURLConnection)) {
					return; // only HTTP(S) connections are supported
				}
				httpConn = (HttpURLConnection)conn;
				
				// Use the configured timeout rather than a hard-coded 2000ms.
				httpConn.setConnectTimeout(timeOut);
				httpConn.setReadTimeout(timeOut);
				httpConn.setInstanceFollowRedirects(false);
				
				httpConn.connect();
				
				// Handle one level of redirection by re-queueing the target.
				if(httpConn.getResponseCode() / 100 == 3) {
					String redirectUrl = httpConn.getHeaderField("Location");
					if(redirectUrl != null)
						tryAddRedirectUrl(redirectUrl);
					return;
				}
				
				// Charset from the Content-Type header; default to GBK.
				String encoding = "gbk";
				String typeStr = httpConn.getContentType();
				if(typeStr != null) {
					Matcher m = pattern.matcher(typeStr);
					if(m.find())
						encoding = m.group(1);
				}
				
				// Last-Modified header, falling back to "now" when absent.
				Calendar time = Calendar.getInstance();
				long modifiedTime = httpConn.getLastModified();
				if(modifiedTime > 0)
					time.setTimeInMillis(modifiedTime);
				
				// Read the body, always closing the reader (previously it
				// leaked on every exception path).
				StringBuilder html = new StringBuilder();
				BufferedReader r = new BufferedReader(
						new InputStreamReader(httpConn.getInputStream(), encoding));
				try {
					String s;
					while((s = r.readLine()) != null)
						html.append(s).append('\n');
				} finally {
					r.close();
				}
				
				analyzeManager.addHtml(urlStr, html.toString(), encoding, time);
				
			} catch (MalformedURLException e) {
				e.printStackTrace();
			} catch (SocketTimeoutException e) {
				e.printStackTrace();
			} catch (IOException e) {
				// Any other I/O failure marks the domain as denied.
				analyzeManager.addDenyDomain(urlStr);
				e.printStackTrace();
			} catch (IllegalArgumentException e) {
				e.printStackTrace();
			} finally {
				// Release the underlying socket on every path.
				if(httpConn != null)
					httpConn.disconnect();
			}
		}
	}
	
	/** URLs waiting to be fetched; all concurrent access is guarded by {@code this}. */
	private Queue<String> urlQueue;
	/** Receives fetched pages and decides which URLs are worth crawling. */
	private AnalyzeManager2 analyzeManager;
	/** Number of fetcher threads created by {@link #start()}. */
	private final int maxCrawlThreadNum;
	/** Extracts the charset name from a Content-Type header value. */
	private final Pattern pattern = Pattern.compile("charset=([\\w-]+)");
	private Fetcher[] fetcher;
	/** Upper bound on the pending-URL queue; URLs beyond it are dropped. */
	private final int maxQueueLength = 20000;
	/** Connect/read timeout in milliseconds for each fetch. */
	private final int timeOut;
	
	/** Creates a manager with 20 fetcher threads and the default timeout. */
	public CrawlManager2() {
		this(20, Config.DEFAULT_TIMEOUT);
	}
	
	/** @param maxThreadNum number of fetcher threads to run */
	public CrawlManager2(int maxThreadNum) {
		this(maxThreadNum, Config.DEFAULT_TIMEOUT);
	}
	
	/**
	 * @param maxThreadNum number of fetcher threads to run
	 * @param timeOut connect/read timeout in milliseconds
	 */
	public CrawlManager2(int maxThreadNum, int timeOut) {
		this.urlQueue = new Queue<String>();
		this.maxCrawlThreadNum = maxThreadNum;
		this.timeOut = timeOut;
	}
	
	/** Must be called before any seed URL is added or {@link #start()} is invoked. */
	public void setAnalyzeManager(AnalyzeManager2 mgr) {
		this.analyzeManager = mgr;
	}
	
	/**
	 * Adds a single seed URL. An analyze manager must already be set.
	 */
	public void addSeedUrl(String url) {
		urlQueue.add(url);
		analyzeManager.tryAddUrl(url);
	}
	
	/** Adds each element of {@code url} as a seed URL. */
	public void addSeedUrls(String[] url) {
		for(int i = 0; i < url.length; i++) {
			addSeedUrl(url[i]);
		}
	}
	
	/**
	 * Queues a URL for fetching and wakes one idle fetcher. The URL is
	 * silently dropped once the queue holds {@code maxQueueLength} entries.
	 */
	public void addUrl(String url) {
		System.out.println("URL queue length: " + urlQueue.size());
		// Add and notify under the monitor the fetchers wait on, so the
		// wakeup cannot be lost and queue access is consistently guarded
		// (previously add locked urlQueue while poll was unguarded).
		synchronized(this) {
			if(urlQueue.size() >= maxQueueLength)
				return;
			urlQueue.add(url);
			notify();
		}
	}
	
	/** Queues a redirect target if the analyze manager accepts it as new. */
	private void tryAddRedirectUrl(String url) {
		if(analyzeManager.tryAddUrl(url))
			addUrl(url);
	}
	
	/**
	 * @param url redirect target
	 * 
	 * @deprecated use {@link #tryAddRedirectUrl(String)} instead
	 */
	@Deprecated
	private void addRedirectUrl(String url) {
		if(analyzeManager.noDuplicateUrl(url)) {
			addUrl(url);
		}
	}
	
	/**
	 * Starts the fetcher threads. Seed URLs and the analyze manager
	 * should already be in place (see the class comment).
	 */
	public void start() {
		fetcher = new Fetcher[maxCrawlThreadNum];
		System.out.println("Maximum crawling thread number: " + maxCrawlThreadNum);
		for(int i = 0; i < maxCrawlThreadNum; i++) {
			fetcher[i] = new Fetcher();
			fetcher[i].start();
		}
	}
}
