package edu.uab.enronemail;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import edu.uab.enronemail.mapreduce.IndexCreatorMapper;

/**
 * Driver for the map-only "Bulk Indexing of Emails" MapReduce job.
 *
 * <p>Reads email records from a SequenceFile input directory, runs
 * {@link IndexCreatorMapper} over them with zero reducers, and ships a
 * stopword list to each task via the distributed cache.
 */
public class EnronEmail {

	// Defaults preserved from the original hard-coded paths; each can be
	// overridden positionally on the command line (see main).
	private static final String DEFAULT_INPUT = "/user/spoudel/input/chunk-0";
	private static final String DEFAULT_OUTPUT = "/user/spoudel/output1";
	private static final String DEFAULT_STOPWORDS = "/user/spoudel/stopwords.txt";

	/**
	 * Configures and submits the indexing job, then exits with status 0 on
	 * success and 1 on failure or error.
	 *
	 * @param args optional overrides: args[0] input path, args[1] output
	 *             path, args[2] stopwords file. Omitted arguments fall back
	 *             to the original defaults, so zero-argument invocation
	 *             behaves exactly as before.
	 */
	public static void main(String[] args) {
		String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
		String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;
		String stopwordsPath = args.length > 2 ? args[2] : DEFAULT_STOPWORDS;

		try {
			JobConf jobConf = new JobConf(EnronEmail.class);
			Job job = new Job(jobConf, "Bulk Indexing of Emails");
			job.setJarByClass(EnronEmail.class);

			// Map-only job: the mapper performs the indexing side effect and
			// emits nothing, hence NullWritable for both key and value.
			job.setMapOutputKeyClass(NullWritable.class);
			job.setMapOutputValueClass(NullWritable.class);
			job.setMapperClass(IndexCreatorMapper.class);
			job.setInputFormatClass(SequenceFileInputFormat.class);
			job.setNumReduceTasks(0);

			// Make the stopword list available to every task.
			DistributedCache.addCacheFile(new URI(stopwordsPath),
					job.getConfiguration());

			// BUG FIX: the original cast job.getConfiguration() to the
			// old-API JobConf and registered the input path through
			// org.apache.hadoop.mapred.FileInputFormat, while the job runs on
			// the new mapreduce API. The path must be added through the
			// mapreduce.lib input format (addInputPath is inherited by the
			// already-imported SequenceFileInputFormat).
			SequenceFileInputFormat.addInputPath(job, new Path(inputPath));
			FileOutputFormat.setOutputPath(job, new Path(outputPath));

			// Propagate job success/failure in the process exit code so
			// callers (scripts, schedulers) can detect a failed run.
			if (!job.waitForCompletion(true)) {
				System.out.println("Problem executing map reduce");
				System.exit(1);
			}
		} catch (InterruptedException e) {
			// Restore the interrupt flag before bailing out.
			Thread.currentThread().interrupt();
			e.printStackTrace();
			System.exit(1);
		} catch (IOException e) {
			e.printStackTrace();
			System.exit(1);
		} catch (URISyntaxException e) {
			e.printStackTrace();
			System.exit(1);
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
			System.exit(1);
		}
	}
}
