package snippets.cse524.activeDates;

import java.io.IOException;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Iterator;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import edu.umd.cloud9.collection.wikipedia.WikipediaPage;
import edu.umd.cloud9.collection.wikipedia.WikipediaPageInputFormat;
import edu.umd.cloud9.io.pair.PairOfIntString;
import edu.umd.cloud9.io.pair.PairOfInts;
import edu.umd.cloud9.io.pair.PairOfStringInt;

/**
 * MapReduce job that counts, for every Wikipedia article, how many times each
 * year is mentioned in its wiki markup. Emits (docid, year) -> count pairs.
 *
 * <p>Usage: CountDatesInWikipedia &lt;input-path&gt; &lt;output-path&gt;
 */
public class CountDatesInWikipedia extends Configured implements Tool {

	/**
	 * Mapper: for each article page, extracts all dates from the wiki markup
	 * and emits one ((docid, year), 1) pair per extracted date.
	 * Non-article pages (redirects, talk pages, etc.) are skipped.
	 */
	public static class Map extends MapReduceBase implements
			Mapper<LongWritable, WikipediaPage, PairOfStringInt, IntWritable> {
		private static final IntWritable ONE = new IntWritable(1);

		// Reused across collect() calls: the old-API OutputCollector
		// serializes the key on collect(), so mutating the same object
		// afterwards is safe — standard Hadoop object-reuse idiom.
		private final PairOfStringInt docidYear = new PairOfStringInt();

		@Override
		public void map(LongWritable key, WikipediaPage page,
				OutputCollector<PairOfStringInt, IntWritable> collector,
				Reporter reporter) throws IOException {

			if (!page.isArticle()) {
				return;
			}

			String content;
			try {
				content = page.getWikiMarkup();
			} catch (Exception e) {
				// Some pages have malformed markup; drop them, but surface
				// how many were dropped instead of swallowing silently.
				reporter.incrCounter("CountDatesInWikipedia", "MARKUP_READ_FAILURES", 1);
				return;
			}

			WikipediaDateExtractor wde = new WikipediaDateExtractor(content, page.getTitle());

			for (GregorianCalendar cal : wde.getAllDates()) {
				docidYear.set(page.getDocid(), cal.get(Calendar.YEAR));
				collector.collect(docidYear, ONE);
			}
		}
	}

	/**
	 * Reducer: sums the 1-counts for each (docid, year) key, yielding the
	 * number of date mentions of that year in that document.
	 */
	public static class Reduce extends MapReduceBase implements
			Reducer<PairOfStringInt, IntWritable, PairOfStringInt, Text> {
		@Override
		public void reduce(PairOfStringInt key, Iterator<IntWritable> values,
				OutputCollector<PairOfStringInt, Text> collector,
				Reporter reporter) throws IOException {
			int sum = 0;
			while (values.hasNext()) {
				sum += values.next().get();
			}

			collector.collect(key, new Text(Integer.toString(sum)));
		}
	}

	public static void main(String[] args) throws Exception {
		// Propagate the tool's exit status so shell scripts and schedulers
		// can detect job failure (previously the return value was dropped).
		System.exit(ToolRunner.run(new CountDatesInWikipedia(), args));
	}

	/**
	 * Configures and submits the job.
	 *
	 * @param args args[0] = input path (Wikipedia dump), args[1] = output path
	 * @return 0 on success, -1 on bad usage
	 * @throws Exception if job submission or execution fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 2) {
			System.err.println("usage: CountDatesInWikipedia <input-path> <output-path>");
			ToolRunner.printGenericCommandUsage(System.err);
			return -1;
		}

		String inputPath = args[0];
		String outputPath = args[1];

		JobConf conf = new JobConf(getConf(), CountDatesInWikipedia.class);
		conf.setJobName(String.format("CountDatesInWikipedia[%s: %s]",
				inputPath, outputPath));

		conf.setNumMapTasks(36);
		conf.setNumReduceTasks(36);

		FileInputFormat.setInputPaths(conf, new Path(inputPath));
		FileOutputFormat.setOutputPath(conf, new Path(outputPath));

		conf.setInputFormat(WikipediaPageInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		conf.setMapperClass(Map.class);
		conf.setReducerClass(Reduce.class);

		// Map output types must be declared explicitly because the map-side
		// value type (IntWritable) differs from the job's final value type.
		conf.setMapOutputKeyClass(PairOfStringInt.class);
		conf.setMapOutputValueClass(IntWritable.class);

		// Set the outputs for the Job
		conf.setOutputKeyClass(PairOfStringInt.class);
		conf.setOutputValueClass(Text.class);

		// Remove any pre-existing output directory; otherwise Hadoop aborts
		// immediately with an "output directory already exists" error.
		FileSystem.get(conf).delete(new Path(outputPath), true);

		JobClient.runJob(conf);

		return 0;
	}

}
