package edu.hit.crawler;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

import edu.hit.crawler.io.CrawItem;
import edu.hit.crawler.util.URLCanonicalizer;
import edu.hit.crawler.util.URLFilter;

/**
 * MapReduce driver that injects seed URLs into the crawl database.
 *
 * <p>Reads plain-text URL lists from {@code <workdir>/in}, filters and
 * canonicalizes each line, de-duplicates by URL in the reducer, and writes
 * the result as a MapFile to {@code <workdir>/crawl_db_new}. If a previous
 * {@code crawl_db} already exists, the new output is merged into it via
 * {@link DbMergerDriver}; otherwise the output is simply renamed to become
 * {@code crawl_db}.
 */
public class InjectDriver extends Configured implements Tool {

	// Kept public for any legacy callers, but now final so it cannot be
	// reassigned.
	public static final Logger logger = Logger.getLogger(InjectDriver.class);

	/**
	 * Emits each legal input URL as a {@code <url, CrawItem(UNCRAWL)>} pair,
	 * canonicalizing first so duplicate URLs collapse onto one reducer key.
	 */
	public static class InjectMapper extends
			Mapper<LongWritable, Text, Text, CrawItem> {

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {

			// Skip empty lines.
			if (null == value || 0 == value.getLength()) {
				return;
			}
			// Drop lines that do not pass the URL filter.
			if (!URLFilter.filter(value.toString())) {
				logger.info(value.toString() + " isn't a legal url!");
				return;
			}

			// Canonicalize so that syntactic variants of the same URL
			// produce an identical key.
			String url = URLCanonicalizer.getCanonicalURL(value.toString());

			if (url != null && url.length() > 0) {
				context.write(new Text(url),
						new CrawItem(url, CrawItem.UNCRAWL));
			}
		}
	}

	/**
	 * De-duplicates by URL: of all {@link CrawItem}s sharing the same key,
	 * only the first one is written out.
	 */
	public static class InjectReducer extends
			Reducer<Text, CrawItem, Text, CrawItem> {

		@Override
		public void reduce(Text key, Iterable<CrawItem> values, Context context)
				throws IOException, InterruptedException {

			// Keep exactly one CrawItem per URL; the rest are duplicates.
			Iterator<CrawItem> it = values.iterator();
			if (it.hasNext()) {
				context.write(key, it.next());
			}
		}
	}

	/**
	 * Command-line entry point: configures logging and delegates to
	 * {@link ToolRunner} so generic Hadoop options are parsed.
	 *
	 * @param args seed-list / generic Hadoop options, passed through to
	 *             {@link #run(String[])}
	 */
	public static void main(String[] args) throws Exception {

		PropertyConfigurator.configure("conf/log4j.properties");
		int res = ToolRunner.run(new Configuration(), new InjectDriver(), args);
		System.exit(res);
	}

	/**
	 * Runs the inject job and folds its output into the crawl database.
	 *
	 * @param args forwarded to {@link DbMergerDriver} when a merge is needed
	 * @return 0 on success, 1 (or the merge driver's status) on failure
	 * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = getConf();

		// Configure the inject job: plain-text URL lists in, MapFile out.
		// Job.getInstance replaces the deprecated Job(Configuration, String)
		// constructor.
		Job job = Job.getInstance(conf, "Inject");
		job.setJarByClass(Crawler.class); // job jar = the one containing Crawler
		job.setMapperClass(InjectMapper.class);
		job.setReducerClass(InjectReducer.class);

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(CrawItem.class);
		job.setOutputFormatClass(MapFileOutputFormat.class);

		job.setNumReduceTasks(2);

		// Work directory layout: <workdir>/in -> <workdir>/crawl_db_new
		String workdir = conf.get("org.work.crawler.dir", "crawler//");
		FileInputFormat.addInputPath(job, new Path(workdir + "in"));
		Path out = new Path(workdir + "crawl_db_new/");

		MapFileOutputFormat.setOutputPath(job, out);

		int res = job.waitForCompletion(true) ? 0 : 1;

		if (res == 0) {
			System.out.println("Inject done!");

			FileSystem fs = FileSystem.get(conf);

			if (fs.exists(new Path(workdir + "crawl_db"))) {
				// A crawl db already exists: merge the freshly injected
				// URLs into it.
				res = ToolRunner.run(getConf(), new DbMergerDriver(), args);
			} else {
				// First injection: the new output simply becomes the db.
				fs.rename(out, new Path(workdir + "crawl_db"));
			}
		} else {
			System.out.println("Inject failed.");
		}

		return res;
	}
}
