package nlp;

import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.HashSet;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.tartarus.snowball.ext.porterStemmer;

import edu.stanford.nlp.tagger.maxent.MaxentTagger;

public class FeatureGeneratorMR {

	/**
	 * Mapper: consumes input records of the form {@code "<label>,<hdfs-path>"},
	 * loads the referenced document from HDFS, extracts features via
	 * {@link TextDocument}, and emits the serialized feature string as the
	 * output key (the value is a {@link NullWritable} placeholder).
	 *
	 * Hadoop instantiates this class reflectively; because it implements
	 * {@link Configurable}, {@link #setConf(Configuration)} is invoked once
	 * per task and performs the heavy one-time initialization (stemmed word
	 * lists, tf-iaf weight maps, POS-tagger model) from files resolved via
	 * DistributedCache symlinks.
	 */
	private static class FeatureGeneratorMapper extends
			Mapper<Object, Text, Text, NullWritable> implements Configurable {

		private Configuration conf;

		@Override
		public void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			// Expected record format: "<label>,<hdfs-file-path>".
			String[] tokens = value.toString().split(",");
			if (tokens.length != 2) {
				// Malformed record: skip it rather than failing the task.
				return;
			}

			context.setStatus("Processing: " + tokens[1]);
			String textString = HdfsFileUtil.ReadFileContent(tokens[1], conf);
			TextDocument document = new TextDocument(textString, tokens[0]);
			String serializedStr = document.SerializeFeatures();
			// Propagate write failures so the framework can fail/retry the
			// task, instead of swallowing them and silently dropping output.
			context.write(new Text(serializedStr), NullWritable.get());
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;

			// Word lists are stemmed the same way document tokens are, so
			// set-membership lookups match after stemming.
			TextDocument.moodWordSet = loadStemmedWordSet("moodWords.txt");
			TextDocument.stopWordSet = loadStemmedWordSet("stopWords.txt");
			TextDocument.tfiafMap = loadWeightMap("tfiaf.txt");
			TextDocument.tfiaftpfMap = loadWeightMap("tfiaftpf.txt");

			try {
				TextDocument.tagger = new MaxentTagger("taggerModel.txt");
			} catch (IOException e) {
				// Fail fast: swallowing this would leave the tagger null and
				// surface later as an obscure NPE inside TextDocument.
				throw new RuntimeException("Cannot load tagger model", e);
			} catch (ClassNotFoundException e) {
				throw new RuntimeException("Cannot load tagger model", e);
			}
		}

		/**
		 * Reads one word per line from {@code fileName}, lower-cases and
		 * Porter-stems each word, and returns the resulting set.
		 */
		private static HashSet<String> loadStemmedWordSet(String fileName) {
			porterStemmer stemmer = new porterStemmer();
			HashSet<String> result = new HashSet<String>();
			for (String word : FileUtil.ReadFileContent(fileName).split("\n")) {
				stemmer.setCurrent(word.toLowerCase());
				stemmer.stem();
				result.add(stemmer.getCurrent());
			}
			return result;
		}

		/**
		 * Parses tab-separated {@code term<TAB>weight} lines from
		 * {@code fileName} into a map. Blank or malformed lines (e.g. a
		 * trailing newline in the file) are skipped instead of throwing.
		 */
		private static HashMap<String, Double> loadWeightMap(String fileName) {
			HashMap<String, Double> result = new HashMap<String, Double>();
			for (String line : FileUtil.ReadFileContent(fileName).split("\n")) {
				String[] tokens = line.split("\t");
				if (tokens.length < 2) {
					continue; // guard against blank/truncated lines
				}
				result.put(tokens[0], Double.parseDouble(tokens[1]));
			}
			return result;
		}
	}

	/**
	 * Identity/deduplicating reducer: emits each distinct feature-string key
	 * exactly once and ignores the (NullWritable) values, so duplicate map
	 * outputs collapse to a single output line.
	 */
	private static class FeatureGeneratorReducer extends
			Reducer<Text, NullWritable, Text, NullWritable> implements
			Configurable {

		private Configuration conf;

		@Override
		public void reduce(Text key, Iterable<NullWritable> values,
				Context context) throws IOException, InterruptedException {
			// Declare the exceptions instead of swallowing them with
			// printStackTrace: a failed write must fail the task so the
			// framework can retry, not silently drop output.
			context.write(key, NullWritable.get());
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
		}
	}

	/**
	 * Job driver. Expects seven arguments: input path, output path, then five
	 * files distributed to every task via DistributedCache symlinks
	 * (stop words, mood words, tf-iaf weights, tf-iaf-tpf weights, tagger
	 * model). Exits with 0 on job success, 1 on job failure.
	 */
	public static void main(String[] args) throws Exception {
		String[] otherArgs = new GenericOptionsParser(args).getRemainingArgs();
		if (otherArgs.length != 7) {
			// Usage errors belong on stderr, not stdout.
			System.err.println("Usage: <program> <input> <output> "
					+ "<stopwords> <moodwords> <tfiaf> <tfiaf-tpf> <tagger-model>");
			System.exit(-1);
		}

		Configuration conf = new Configuration();
		// Symlinks let tasks open the cached files by bare name
		// (e.g. "moodWords.txt") from their working directory.
		DistributedCache.createSymlink(conf);
		DistributedCache.addCacheFile(new URI(otherArgs[2]), conf);
		DistributedCache.addCacheFile(new URI(otherArgs[3]), conf);
		DistributedCache.addCacheFile(new URI(otherArgs[4]), conf);
		DistributedCache.addCacheFile(new URI(otherArgs[5]), conf);
		DistributedCache.addCacheFile(new URI(otherArgs[6]), conf);

		// Large heap for the tagger model; long timeout for slow documents.
		conf.set("mapred.child.java.opts", "-Xmx2000M");
		conf.set("mapred.task.timeout", "12000000");
		Job job = new Job(conf, "FeatureGenerator");

		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

		job.setJarByClass(FeatureGeneratorMR.class);
		job.setMapperClass(FeatureGeneratorMR.FeatureGeneratorMapper.class);
		job.setReducerClass(FeatureGeneratorMR.FeatureGeneratorReducer.class);
		job.setNumReduceTasks(48);
		// One input line (= one document) per map task.
		NLineInputFormat.setNumLinesPerSplit(job, 1);
		job.setInputFormatClass(NLineInputFormat.class);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(NullWritable.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);

		// Reflect the job outcome in the process exit code; the original
		// always exited 0, reporting success even for failed jobs.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
