package org.joy.pagerank;

import java.util.ArrayList;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.joy.crawler.OutlinksWritable;

/**
 * NormalizeDriver starts two jobs. The first normalizes all the outlinks
 * produced by the crawler and converts them to plain text format; the
 * second sorts and merges the normalized links.<br/>
 * NOTE: one of the most important tasks of the prepare job is to handle
 * HTTP redirects: every redirected URL is replaced with its "real" target
 * URL. For the rationale, please refer to the other documents of this
 * program.
 * 
 * @author Administrator
 * 
 */
public class NormalizeDriver extends Configured implements Tool {
	/**
	 * Entry point: delegates to {@link ToolRunner} so that generic Hadoop
	 * command-line options are parsed before {@link #run(String[])} executes.
	 *
	 * @param args command-line arguments forwarded to the tool
	 */
	public static void main(String[] args) throws Exception {
		NormalizeDriver driver = new NormalizeDriver();
		System.exit(ToolRunner.run(new Configuration(), driver, args));
	}

	/**
	 * Mapper for NormalizeDriever, this Mapper simply inverse all the input
	 * outlinks and its source. When the outlinks is empty for some given source
	 * links, we will write a "NULL" to its output, showing it has no outlinks.<br/>
	 * Another situation is the "Redirected Outlinks" object. They are the
	 * object that demonstrate the redirect relation between two URLs, in this
	 * case, we will write them to the output without the inverse and mark them
	 * by "red:" at the beginning of the REAL URL.
	 * 
	 * @author Administrator
	 * 
	 */
	static class NormalizeMapper extends
			Mapper<Text, OutlinksWritable, Text, Text> {
		protected void map(
				Text key,
				OutlinksWritable value,
				org.apache.hadoop.mapreduce.Mapper<Text, OutlinksWritable, Text, Text>.Context context)
				throws java.io.IOException, InterruptedException {
			if (value.isRedirected()) {
				context.write(key, new Text("red:" + value.getOutlinks()[0]));
				return;
			}
			if (value.getOutlinks().length == 0) {
				context.write(new Text("NULL"), key);
			} else {
				for (String link : value.getOutlinks()) {
					context.write(new Text(link), key);
					System.out.println(link);
				}
			}
		};
	}

	/**
	 * The reducer for NormalizeDriver, this reducer's input key is the outlink.
	 * Its value is source links that points to its key, and also the key's real
	 * URL after the redirection.
	 * 
	 * the output of this reducer will inverse the input again and replace the
	 * outlinks with its real URL if it has been redirected.
	 * 
	 * @author Administrator
	 * 
	 */
	static class NormalizeReducer extends
			org.apache.hadoop.mapreduce.Reducer<Text, Text, Text, Writable> {
		protected void reduce(
				Text key,
				java.lang.Iterable<Text> value,
				org.apache.hadoop.mapreduce.Reducer<Text, Text, Text, Writable>.Context context)
				throws java.io.IOException, InterruptedException {
			String redirectTo = key.toString();
			ArrayList<String> values = new ArrayList<String>();
			for (Text t : value) {
				values.add(t.toString());
			}
			for (String t : values) {
				if (t.startsWith("red:")) {
					System.out.println(t);
					redirectTo = t.toString().substring("red:".length());
					break;
				}
			}
			for (String t : values) {
				if (!t.startsWith("red:")) {
					context.write(new Text(t), new Text(redirectTo));
				}
			}
		};
	}

	/**
	 * Runs the two-stage normalization: (1) a "Rank" job that inverts the
	 * crawler's (source, outlinks) records and resolves HTTP redirects,
	 * writing plain text to "in"; (2) a "sort" job (identity map over the
	 * first job's output) that groups links per page and writes RankRecord
	 * rows. On success, "in_sort" replaces "in", so later stages always
	 * read from "in".
	 * 
	 * @param args command-line arguments (unused; paths are hard-coded)
	 * @return 0 on success, 1 if either job fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = getConf();
		FileSystem fs = FileSystem.get(conf);

		// Job 1: normalize crawler output (redirect resolution + inversion).
		Job job = new Job(conf, "Rank");
		job.setJarByClass(NormalizeDriver.class);
		job.setMapperClass(NormalizeMapper.class);
		job.setReducerClass(NormalizeReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setInputFormatClass(SequenceFileInputFormat.class);

		// Every crawler segment under "links" is an input.
		for (FileStatus s : fs.listStatus(new Path("links"))) {
			FileInputFormat.addInputPath(job, s.getPath());
		}

		fs.delete(new Path("in"), true);
		FileOutputFormat.setOutputPath(job, new Path("in"));
		int res = job.waitForCompletion(true) ? 0 : 1;
		if (res != 0) {
			return res;
		}

		// Job 2: sort/merge the normalized links into RankRecord form.
		job = new Job(conf, "sort");
		job.setJarByClass(NormalizeDriver.class);
		job.setInputFormatClass(KeyValueTextInputFormat.class);
		job.setReducerClass(NormalizeSorter.class);
		job.setOutputKeyClass(Text.class);
		// The identity mapper emits Text values while NormalizeSorter emits
		// RankRecord values, so the two value classes must be set separately
		// (a single setOutputValueClass(Text.class) would make the reducer
		// fail with a "wrong value class" error).
		job.setMapOutputValueClass(Text.class);
		job.setOutputValueClass(RankRecord.class);
		FileInputFormat.addInputPath(job, new Path("in"));
		fs.delete(new Path("in_sort"), true);
		FileOutputFormat.setOutputPath(job, new Path("in_sort"));
		res = job.waitForCompletion(true) ? 0 : 1;
		if (res == 0) {
			// Swap the sorted output into place as the canonical "in".
			fs.delete(new Path("in"), true);
			fs.rename(new Path("in_sort"), new Path("in"));
		}
		return res;
	}

	/**
	 * Sort the normalized links, and output them with the plain text format
	 * 
	 * @author Administrator
	 * 
	 */
	static class NormalizeSorter extends Reducer<Text, Text, Text, RankRecord> {
		protected void reduce(
				Text key,
				java.lang.Iterable<Text> value,
				org.apache.hadoop.mapreduce.Reducer<Text, Text, Text, RankRecord>.Context context)
				throws java.io.IOException, InterruptedException {

			ArrayList<String> array = new ArrayList<String>();
			for (Text t : value) {
				if (!t.toString().equals("NULL")) {
					array.add(t.toString());
				}
			}
			RankRecord rec = new RankRecord(0.0, array.toArray(new String[0]));
			context.write(key, rec);
		};
	}
}
