package edu.hit.crawler;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

import edu.hit.crawler.io.CrawItem;
import edu.hit.crawler.mapred.PartitionByHost;
import edu.hit.crawler.util.CrawItemComparator;

/**
 * Driver for the Crawler Hadoop program. SelectDriver selects an appropriate
 * number of urls for every reduce node.
 * 
 * @author zzc (zzc3615@gmail.com)
 */
@SuppressWarnings("deprecation")
public class SelectDriver extends Configured implements Tool {
	public static Logger logger = Logger.getLogger(SelectDriver.class);

	/**
	 * Map phase of the select step: passes through every crawl-db entry whose
	 * status indicates it still needs to be fetched, and drops entries that
	 * are already finished.
	 */
	public static class SelectMapper extends
			Mapper<Text, CrawItem, Text, CrawItem> {

		@Override
		public void map(Text key, CrawItem value, Context context)
				throws IOException, InterruptedException {
			// URL filtering/canonicalization used to happen here; it was
			// removed because the inject and update phases already do it.

			// Decide from the crawl-db status whether this item is eligible
			// for selection: successfully fetched, missing, or gone pages
			// need no re-fetch and are filtered out.
			final int status = value.getStatus();
			final boolean finished = status == CrawItem.SUCCESS
					|| status == CrawItem.NOTFOUND
					|| status == CrawItem.GONE;
			if (!finished) {
				context.write(key, value);
			}
		}
	}

	/**
	 * Reduce phase of the select step: emits at most {@code top_n} urls in
	 * total and at most {@code per_host_limit} urls per host, and periodically
	 * reports progress to a UI servlet via HTTP POST.
	 *
	 * <p>Note: the counters are intentionally kept {@code public static} for
	 * backward compatibility with external readers; they assume a single
	 * reducer instance per JVM.
	 */
	public static class SelectReducer extends
			Reducer<Text, CrawItem, Text, CrawItem> {

		/** Total number of CrawItems seen so far by this task. */
		public static int count = 0;
		/** Maximum number of urls that may be selected for a single host. */
		public static int per_host_limit = 100000;
		/** Overall cap (top-n) on the number of urls this task may inspect. */
		public static int top_n = 600000;
		/** Timestamp of the last progress report sent to the UI servlet. */
		private static long start = System.currentTimeMillis();
		/** Per-host selection counter: host name -> urls already charged to it. */
		private static Map<String, Integer> hostTable = new HashMap<String, Integer>();
		String uiservlet;    // UI servlet URL for progress reports
		int reportInterval;  // minimum milliseconds between two reports
		boolean isSendInfo;  // whether progress reporting is enabled

		@Override
		public void setup(Context context) {
			// Dump the effective configuration values for debugging.
			System.out.println("org.work.crawler.maxthread.samehost is : "
					+ context.getConfiguration().get(
							"org.work.crawler.maxthread.samehost"));
			System.out.println("mapred.child.java.opts : "
					+ context.getConfiguration().get("mapred.child.java.opts"));
			System.out.println("dfs.replication : "
					+ context.getConfiguration().get("dfs.replication"));

			uiservlet = context.getConfiguration().get("org.work.crawler.ui.servlet");
			reportInterval = context.getConfiguration().getInt("org.work.crawler.reportInterval", 2*1000);
			System.out.println("org.work.crawler.reportInterval : "
					+ reportInterval);
			isSendInfo = context.getConfiguration().getBoolean("org.work.crawler.isSendInfo", true);
			System.out.println("org.work.crawler.isSendInfo : "
					+ isSendInfo);
		}

		int select = 0;     // number of urls emitted (selected)
		int unselected = 0; // number of urls skipped due to the limits

		@Override
		public void reduce(Text key, Iterable<CrawItem> values, Context context) {
			// Stop doing any work once the global top-n budget is exhausted.
			if (count > top_n)
				return;
			try {
				String host;
				int now_num;
				for (CrawItem value : values) {

					// Periodically report progress to the UI servlet; a
					// reporting failure must never abort the selection.
					long now = System.currentTimeMillis();
					if (isSendInfo && now - start > reportInterval) {
						try {
							DefaultHttpClient httpclient = new DefaultHttpClient();
							String uri = uiservlet + "?state=selecting&count=" + Integer.toString(count);

							HttpPost post = new HttpPost(uri);
							HttpResponse response = httpclient.execute(post);
							System.out.println(response.getStatusLine());
							httpclient.getConnectionManager().shutdown();
						} catch (Exception e) {
							e.printStackTrace();
						}

						start = now;
					}
					host = new URL(value.getUrl()).getHost();
					if (null == host) {
						logger.error("host is null!");
						continue;
					}
					// First url for a host starts its counter at 1.
					now_num = hostTable.getOrDefault(host, 1);
					if (count > top_n || now_num > per_host_limit) {
						unselected++;
					} else {
						select++;
						hostTable.put(host, now_num + 1);
						context.write(new Text(value.getUrl()), value);
					}
					count++;
					// Keys are urls, so duplicates carry the same url: only
					// the first CrawItem per key needs to be considered.
					break;
				}
			} catch (MalformedURLException e) {
				e.printStackTrace();
			} catch (IOException e) {
				e.printStackTrace();
			} catch (InterruptedException e) {
				// Restore the interrupt flag so the framework can see it.
				Thread.currentThread().interrupt();
				e.printStackTrace();
			}
		}

		@Override
		public void cleanup(Context context) throws IOException {
			// Emit final selection statistics for this task.
			System.out.println("selected : unselected " + select + " " + unselected);
			System.out.println("total : " + count);
		}
	}

	/**
	 * Entry point: delegates command-line parsing and execution to Hadoop's
	 * {@link ToolRunner} and exits with the job's result code.
	 */
	public static void main(String[] args) throws Exception {
		final int exitCode = ToolRunner.run(new Configuration(), new SelectDriver(), args);
		System.exit(exitCode);
	}

	/**
	 * Configures and runs the Select MapReduce job: reads the crawl database
	 * from {@code <workdir>/crawl_db/} and writes the selected urls into a
	 * timestamped directory under {@code <workdir>/generate/}.
	 *
	 * @param arg0 unused command-line arguments (configuration comes from
	 *             {@link #getConf()})
	 * @return 0 on success, 1 on failure
	 */
	@Override
	public int run(String[] arg0) throws Exception {

		// config a job and start it
		Configuration conf = getConf();
		Job job = new Job(conf);
		job.setJobName("Select");
		job.setJarByClass(Crawler.class);
		job.setMapperClass(SelectMapper.class);
		// Route all urls of the same host to one reducer so SelectReducer's
		// per-host limit is enforced globally.
		job.setPartitionerClass(PartitionByHost.class);
		job.setReducerClass(SelectReducer.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(CrawItem.class);

		job.setInputFormatClass(SequenceFileInputFormat.class);
		job.setOutputFormatClass(SequenceFileOutputFormat.class);

		int r_num = conf.getInt("org.work.crawler.num.reduceTask", 6);
		job.setNumReduceTasks(r_num);

		String workdir = conf.get("org.work.crawler.dir", "crawler/");

		// Read the whole crawl database; filtering happens in the mapper.
		FileInputFormat.addInputPath(job, new Path(workdir + "crawl_db/"));

		// Timestamped output directory keeps successive generate runs apart.
		Path out = new Path(workdir
				+ "generate/"
				+ new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss").format(System
						.currentTimeMillis()));
		SequenceFileOutputFormat.setOutputPath(job, out);

		int res = job.waitForCompletion(true) ? 0 : 1;

		if (res == 0) {
			System.out.println("select done.");
		} else {
			System.out.println("select failed.");
		}

		return res;
	}
}