package edu.hit.crawler.mapred;

import java.io.IOException;
import java.net.SocketTimeoutException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.Logger;

import edu.hit.crawler.http.HttpUtil;
import edu.hit.crawler.io.CrawItem;
import edu.hit.crawler.io.FetchOutput;
import edu.hit.crawler.util.CrawItemComparator;

public class CrawlReducer extends
		Reducer<Text, CrawItem, CrawItem, FetchOutput> {

	public static Logger logger = Logger.getLogger(CrawlReducer.class);

	/**
	 * we need to split all url into several blocks if they are too many
	 */
	private final static int BLOCK_SIZE = 1024;

	// timestamp of the last progress report pushed to the UI servlet
	private static long start = System.currentTimeMillis();

	/**
	 * Starts a multi-threaded reducer that downloads every url handed to this
	 * reduce task and emits one (CrawItem, FetchOutput) record per url.
	 * <p>
	 * Concurrency model: a ThreadPoolExecutor is throttled by three semaphores —
	 * {@code s} caps the number of simultaneously downloading threads,
	 * {@code sBlock} admits at most {@link #BLOCK_SIZE} urls into the pool at a
	 * time, and a per-host semaphore in {@code intervalBlocker} caps concurrent
	 * fetches against a single host and enforces a minimum per-fetch interval.
	 *
	 * @param context the reduce context; shared by all worker threads, so every
	 *                write to it is serialized with {@code synchronized(context)}
	 * @throws IOException          on malformed urls or output failures
	 * @throws InterruptedException if the main thread is interrupted while
	 *                              waiting on a semaphore or pool termination
	 */
	@Override
	public void run(
			final org.apache.hadoop.mapreduce.Reducer<Text, CrawItem, CrawItem, FetchOutput>.Context context)
			throws IOException, InterruptedException {

		// Deduplicate: keep only the first CrawItem seen for each key.
		ArrayList<CrawItem> list = new ArrayList<CrawItem>();
		while (context.nextKey()) {
			for (CrawItem it : context.getValues()) {
				list.add(new CrawItem(it));
				break; // only the first value per key matters
			}
		}

		// NOTE(review): the keys printed below use the "org.work" prefix, but the
		// values the code actually reads use "org.joy" (see getInt calls further
		// down). Confirm which prefix the job configuration really sets — one of
		// the two is almost certainly stale.
		System.out.println(context.getConfiguration());
		System.out.println("org.work.crawler.maxthread.samehost is : "
				+ context.getConfiguration().get(
						"org.work.crawler.maxthread.samehost"));
		System.out.println("org.work.crawler.worker : "
				+ context.getConfiguration().get("org.work.crawler.worker"));
		System.out.println("dfs.replication : "
				+ context.getConfiguration().get("dfs.replication"));

		final String uiservlet = context.getConfiguration().get("org.work.crawler.ui.servlet");
		System.out.println("org.work.crawler.ui.servlet : " + uiservlet);
		int reportInterval = context.getConfiguration().getInt("org.work.crawler.reportInterval", 2 * 1000);
		boolean isSendInfo = context.getConfiguration().getBoolean("org.work.crawler.isSendInfo", false);

		// Optimize the download sequence, for more info, see HostShuffle's doc
		// TODO: which is better?
		// List<CrawItem> downloadList = HostShuffle.shuffle(list);
		Collections.sort(list, new CrawItemComparator());
		List<CrawItem> downloadList = list;

		// SynchronousQueue + the semaphores below keep the pool from queueing
		// unbounded work: the main loop blocks on sBlock before each submit.
		BlockingQueue<Runnable> queue = new SynchronousQueue<Runnable>();
		// Read the concurrency setting
		int numWorker = context.getConfiguration().getInt(
				"org.joy.crawler.worker", 100);

		// semaphore used to limit the working threads
		final Semaphore s = new Semaphore(numWorker);
		// semaphore locker will separate the input urls one block at a time.
		final Semaphore sBlock = new Semaphore(BLOCK_SIZE);

		ThreadPoolExecutor threadPool = new ThreadPoolExecutor(BLOCK_SIZE,
				BLOCK_SIZE * 2, Long.MAX_VALUE, TimeUnit.SECONDS, queue);

		// per-host limiter: host -> semaphore capping simultaneous fetches.
		// Only the main thread ever puts into this map; workers only read it.
		final Map<String, Semaphore> intervalBlocker = Collections
				.synchronizedMap(new HashMap<String, Semaphore>());

		// minimum time (ms) each per-host fetch slot is held, throttling rate
		final long interval = context.getConfiguration().getLong(
				"org.joy.crawler.interval", 100);
		// FIX: loop-invariant config read hoisted out of the url loop
		final int maxSameHost = context.getConfiguration().getInt(
				"org.joy.crawler.maxthread.samehost", 50);

		logger.info(" This reduce node's downloadList size is : "
				+ downloadList.size());
		System.out.println("---------" + downloadList.size() + "------------");

		int count = 0; // urls submitted since the last UI report
		int res = 0;   // total urls submitted so far (progress counter)
		for (final CrawItem it : downloadList) {

			if (res % 100 == 0) {
				System.out.println(new Date() + " has fetched  " + res + "urls");
			}
			res++;
			count++;

			// Periodically POST progress to the UI servlet (best effort only).
			long now = System.currentTimeMillis();
			if (isSendInfo && now - start > reportInterval) {
				DefaultHttpClient httpclient = null;
				try {
					httpclient = new DefaultHttpClient();
					String uri = uiservlet + "?state=crawling&count=" + Integer.toString(count);
					httpclient.execute(new HttpPost(uri));
					count = 0;
					start = now;
				} catch (Exception reportFailure) {
					// best-effort reporting: a failed report must never stop the crawl
					logger.debug("progress report failed: " + reportFailure);
				} finally {
					// FIX: shutdown moved to finally so a failed execute() does
					// not leak the connection manager
					if (httpclient != null) {
						httpclient.getConnectionManager().shutdown();
					}
				}
			}

			final String url = it.getUrl();
			// compute the host once; the worker below reuses it
			final String host = new URL(url).getHost();
			sBlock.acquire();
			// single-writer map: check-then-put is safe on the main thread
			if (intervalBlocker.get(host) == null) {
				intervalBlocker.put(host, new Semaphore(maxSameHost));
			}
			threadPool.execute(new Runnable() {
				@Override
				public void run() {
					long startTime = 0;
					Semaphore hostLimiter = null;
					// FIX: guards the release in finally — the original released
					// the per-host semaphore even when acquire() was never
					// reached, inflating its permits and defeating the throttle
					boolean hostPermitHeld = false;
					try {
						// acquire the concurrency locker
						s.acquire();

						// check last visit
						hostLimiter = intervalBlocker.get(host);
						if (hostLimiter != null) {
							hostLimiter.acquire();
							hostPermitHeld = true;
							// set the start time
							startTime = System.currentTimeMillis();
						}

						FetchOutput fetchout = HttpUtil.getFetchOutput(url);
						it.setLastFetchTime(System.currentTimeMillis());

						// context is shared by all workers; serialize writes
						synchronized (context) {
							context.setStatus(url);
							context.write(it, fetchout);
						}
					} catch (Exception e) {
						if (e instanceof InterruptedException) {
							// FIX: restore the interrupt flag instead of losing it
							Thread.currentThread().interrupt();
						} else if (e instanceof UnknownHostException) {
							writeFailure(context, it, url, new FetchOutput(
									it.getUrl(), FetchOutput.NOTFOUND));
						} else if (e instanceof SocketTimeoutException) {
							writeFailure(context, it, url, new FetchOutput(
									it.getUrl(), FetchOutput.RETRY));
						}
						// other failures are intentionally dropped, as before
					} finally {
						try {
							if (hostPermitHeld) {
								// hold the per-host slot until `interval` ms
								// have elapsed since the fetch started
								long sleepTime = interval
										- (System.currentTimeMillis() - startTime);
								if (sleepTime > 0) {
									Thread.sleep(sleepTime);
								}
								hostLimiter.release();
							}
						} catch (Exception e) {
							e.printStackTrace();
						}
						s.release();
						sBlock.release();
					}
				}

			});
		}

		// wait for every submitted download to finish
		threadPool.shutdown();
		threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);

		System.out.println("sBlock : " + sBlock.availablePermits());
		System.out.println("s : " + s.availablePermits());
	}

	/**
	 * Writes a failure record for {@code url}, serialized on the shared context.
	 * Output errors are logged and swallowed so the worker can still release
	 * its semaphores in the caller's finally block.
	 */
	private static void writeFailure(
			final org.apache.hadoop.mapreduce.Reducer<Text, CrawItem, CrawItem, FetchOutput>.Context context,
			CrawItem it, String url, FetchOutput out) {
		synchronized (context) {
			try {
				context.setStatus(url);
				context.write(it, out);
			} catch (Exception e1) {
				e1.printStackTrace();
			}
		}
	}
}