package org.correct.crawler;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.correct.util.Counter;
import org.correct.util.Queue;

/**
 * To start CrawlManager, the following steps should be done before the 
 * <code>start()</code> method is called: <br>
 * 1. set an <code>AnalyzeManager</code> <br>
 * 2. add at least one seed URL to it <br>
 * 
 * @author Li Yujia
 *
 * @deprecated
 */
@Deprecated
public class CrawlManager extends Thread {
	/** Thread limit used by the no-arg constructor. */
	private static final int DEFAULT_MAX_CRAWL_THREADS = 20;
	/** Fallback charset when the Content-Type header names none. */
	private static final String DEFAULT_ENCODING = "gbk";
	/** Extracts the charset token from a Content-Type header value. */
	private static final Pattern CHARSET_PATTERN = Pattern.compile("charset=([\\w-]+)");
	/** Connect/read timeout for each fetch, in milliseconds. */
	private static final int TIMEOUT_MS = 2000;
	
	private final Queue<String> urlQueue;
	private final Counter counter; // number of fetch threads currently in flight
	private final int maxCrawlThreadNum;
	private AnalyzeManager analyzeManager;
	
	/** Creates a manager limited to {@value #DEFAULT_MAX_CRAWL_THREADS} concurrent fetches. */
	public CrawlManager() {
		this(DEFAULT_MAX_CRAWL_THREADS);
	}
	
	/**
	 * Creates a manager limited to the given number of concurrent fetch threads.
	 *
	 * @param maxThreadNum maximum number of fetch threads running at once
	 */
	public CrawlManager(int maxThreadNum) {
		urlQueue = new Queue<String>();
		counter = new Counter();
		this.maxCrawlThreadNum = maxThreadNum;
	}
	
	/**
	 * Sets the consumer that receives each fetched page. Must be called
	 * before {@link #start()}, otherwise fetches fail with an NPE.
	 */
	public void setAnalyzeManager(AnalyzeManager mgr) {
		this.analyzeManager = mgr;
	}
	
	/**
	 * Main loop: blocks until a URL is queued and a fetch slot is free,
	 * then launches one fetch. Exits when the thread is interrupted.
	 */
	@Override
	public void run() {
		try {
			while(true) {
				// The wait condition must be re-checked while holding the
				// monitor. Checking it outside synchronized (as the old code
				// did) could miss a notify() issued between the check and
				// wait(), deadlocking the loop.
				synchronized(this) {
					while(counter.val() >= maxCrawlThreadNum || urlQueue.peek() == null)
						wait();
				}
				fetch();
			}
		} catch(InterruptedException e) {
			// Restore the interrupt flag so callers can observe the shutdown.
			Thread.currentThread().interrupt();
		}
	}
	
	/**
	 * Takes one URL off the queue and downloads it on a new worker thread.
	 * The counter slot claimed here is always released (and the manager
	 * re-notified) when the worker finishes, even on failure.
	 */
	private void fetch() {
		counter.inc();
		final String urlStr = urlQueue.poll();
		if(urlStr == null) {
			// Queue was drained between peek() and poll(); release the slot.
			counter.dec();
			notifyCrawler();
			return;
		}
		
		new Thread() {
			@Override
			public void run() {
				try {
					URL url = toAbsoluteUrl(urlStr);
					
					System.out.println(this + "Fetching " + url + " ...");
					
					URLConnection conn = url.openConnection();
					conn.setConnectTimeout(TIMEOUT_MS);
					conn.setReadTimeout(TIMEOUT_MS);
					conn.connect();
					
					String encoding = detectEncoding(conn.getContentType());
					String html = readBody(conn, encoding);
					
					analyzeManager.addHtml(html, encoding);
					
				} catch (MalformedURLException e) {
					e.printStackTrace();
				} catch (SocketTimeoutException e) {
					e.printStackTrace();
				} catch (IOException e) {
					e.printStackTrace();
				} finally {
					// Previously ran after the catch blocks, so an uncaught
					// RuntimeException (e.g. analyzeManager never set) would
					// have leaked the slot permanently. finally guarantees it.
					counter.dec();
					notifyCrawler();
				}
			}
		}.start();
	}
	
	/** Prefixes "http://" when the URL string carries no scheme. */
	private static URL toAbsoluteUrl(String urlStr) throws MalformedURLException {
		if(urlStr.startsWith("http://") || urlStr.startsWith("https://"))
			return new URL(urlStr);
		return new URL("http://" + urlStr);
	}
	
	/**
	 * Pulls the charset out of a Content-Type header value, falling back to
	 * {@value #DEFAULT_ENCODING} when absent or unparseable.
	 */
	private static String detectEncoding(String contentType) {
		if(contentType != null) {
			Matcher m = CHARSET_PATTERN.matcher(contentType);
			if(m.find())
				return m.group(1);
		}
		return DEFAULT_ENCODING;
	}
	
	/**
	 * Reads the whole response body as text using the given encoding.
	 * Uses a StringBuilder (the old String += loop was O(n^2)) and always
	 * closes the reader (the old code leaked it).
	 *
	 * @throws IOException on read failure or unsupported encoding
	 */
	private static String readBody(URLConnection conn, String encoding) throws IOException {
		BufferedReader r = new BufferedReader(
				new InputStreamReader(conn.getInputStream(), encoding));
		try {
			StringBuilder html = new StringBuilder();
			String line;
			while((line = r.readLine()) != null)
				html.append(line).append('\n');
			return html.toString();
		} finally {
			r.close();
		}
	}
	
	/** Enqueues a URL and wakes the manager loop to consider it. */
	public void addUrl(String url) {
		urlQueue.add(url);
		notifyCrawler();
	}
	
	/** Wakes the run() loop so it re-checks its wait condition. */
	private void notifyCrawler() {
		synchronized(this) {
			notify();
		}
	}
	
	/** Enqueues a seed URL without waking the manager; call before start(). */
	public void addSeedUrl(String url) {
		urlQueue.add(url);
	}
	
	/** Enqueues several seed URLs without waking the manager; call before start(). */
	public void addSeedUrls(String[] url) {
		for(int i = 0; i < url.length; i++)
			urlQueue.add(url[i]);
	}
}
