package nlp;

import java.io.IOException;
import java.util.HashMap;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.tartarus.snowball.ext.porterStemmer;

import weka.core.tokenizers.NGramTokenizer;

public class TfIafGenerator {

	/**
	 * First-stage mapper. Input lines are "authorName,fileName" pairs; for each
	 * file it emits one (stemmed n-gram, author\u0001topic\u0001docLength) pair
	 * per character n-gram of the document text.
	 */
	private static class TfIafGeneratorMapper extends
			Mapper<Object, Text, Text, Text> implements Configurable {

		// Separator between the fields packed into the output value. Must
		// match the split performed in TfIafGeneratorReducer.
		private static final String FIELD_SEPARATOR = "\u0001";

		private Configuration conf;
		// fileName -> topic, loaded in setConf when "topicMap" is configured.
		private HashMap<String, String> topicMap;

		// Create NGram features for one document.
		private void OutputNgrams(String authorName, String fileName,
				Context context) throws InterruptedException {
			String textString = HdfsFileUtil.ReadFileContent(fileName, conf);

			// Tokenize the text. The n-gram bounds MUST be set before
			// tokenize(); configuring them afterwards has no effect on the
			// tokens already produced.
			int ngramSize = 3;
			NGramTokenizer ngramTokenizer = new NGramTokenizer();
			ngramTokenizer.setNGramMaxSize(ngramSize);
			ngramTokenizer.setNGramMinSize(ngramSize);
			ngramTokenizer.tokenize(textString);

			// TopicFrequency specific code: skip documents without a topic.
			String topic = "NA";
			if (conf.getBoolean("topicFreq", false)) {
				if (topicMap == null || !topicMap.containsKey(fileName)) {
					return;
				}
				topic = topicMap.get(fileName);
			}

			// The document length is carried along so the reducer can
			// length-normalize each occurrence.
			Text outputText = new Text(authorName + FIELD_SEPARATOR + topic
					+ FIELD_SEPARATOR + textString.length());
			porterStemmer stemmer = new porterStemmer();
			while (ngramTokenizer.hasMoreElements()) {
				String ngram = (String) ngramTokenizer.nextElement();
				// Drop n-grams that would leak the author's own name into
				// the feature set.
				if (AuthorNameFilter.ContainsAuthorName(ngram)) {
					continue;
				}
				stemmer.setCurrent(ngram.toLowerCase());
				stemmer.stem();
				String stemmedNgram = stemmer.getCurrent();
				try {
					context.write(new Text(stemmedNgram), outputText);
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}

		public void map(Object key, Text value, Context context) {
			String[] tokens = value.toString().split(",");
			// Silently skip malformed input lines.
			if (tokens.length != 2) {
				return;
			}
			context.setStatus("Processing: " + tokens[1]);
			try {
				OutputNgrams(tokens[0], tokens[1], context);
			} catch (InterruptedException e) {
				// Restore the interrupt flag so the framework can observe it.
				Thread.currentThread().interrupt();
			}
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
			// Eagerly load the fileName -> topic map, if one is configured.
			String topicMapFilename = conf.get("topicMap", "");
			if (topicMapFilename.length() > 0) {
				String topicContents = HdfsFileUtil.ReadFileContent(
						topicMapFilename, conf);
				topicMap = new HashMap<String, String>();
				for (String line : topicContents.split("\n")) {
					String[] tokens = line.split("\t");
					// Skip malformed lines instead of throwing
					// ArrayIndexOutOfBoundsException.
					if (tokens.length >= 2) {
						topicMap.put(tokens[0], tokens[1]);
					}
				}
			}
		}
	}

	/**
	 * First-stage reducer. key: a stemmed n-gram; values: records of the form
	 * author\u0001topic\u0001docLength. Emits, per author that used the
	 * n-gram, (author, ngram\u0001tfIafScore).
	 */
	private static class TfIafGeneratorReducer extends
			Reducer<Text, Text, Text, Text> implements Configurable {

		// Must match TfIafGeneratorMapper's output-value separator.
		private static final String FIELD_SEPARATOR = "\u0001";

		private Configuration conf;

		public void reduce(Text key, Iterable<Text> values, Context context) {
			// Authors that used this n-gram at least once.
			Set<String> authorSet = new TreeSet<String>();
			// Per-author sum of length-normalized occurrence frequencies.
			HashMap<String, Double> authorNgramFreq = new HashMap<String, Double>();
			// Per-author set of distinct topics the n-gram appeared in.
			HashMap<String, Set<String>> authorNgramTopicMap = new HashMap<String, Set<String>>();
			for (Text value : values) {
				String[] tokens = value.toString().split(FIELD_SEPARATOR);
				// Skip malformed records.
				if (tokens.length != 3) {
					continue;
				}
				String authorName = tokens[0];
				String topic = tokens[1];
				int docLen = Integer.parseInt(tokens[2]);
				// One occurrence, normalized by the source document's length.
				double normalizedFreq = 1.0 / (double) docLen;
				authorSet.add(authorName);
				if (!authorNgramFreq.containsKey(authorName)) {
					authorNgramFreq.put(authorName, 0.0);
				}
				if (!authorNgramTopicMap.containsKey(authorName)) {
					authorNgramTopicMap.put(authorName, new TreeSet<String>());
				}
				authorNgramFreq.put(authorName,
						authorNgramFreq.get(authorName) + normalizedFreq);
				authorNgramTopicMap.get(authorName).add(topic);
			}

			// Inverse author frequency: an n-gram used by fewer authors is
			// weighted higher (squared below).
			int numAuthors = conf.getInt("numAuthors", 14);
			double iaf = (double) numAuthors / (double) authorSet.size();
			try {
				for (String authorName : authorNgramFreq.keySet()) {
					double authorFreq = authorNgramFreq.get(authorName);
					double tfiaf = authorFreq * iaf * iaf;
					// Topic frequency specific code: boost by the cube of the
					// number of distinct topics the author used this n-gram in.
					if (conf.getBoolean("topicFreq", false)) {
						int authorTopicFreq = authorNgramTopicMap.get(
								authorName).size();
						tfiaf *= (double) authorTopicFreq * authorTopicFreq
								* authorTopicFreq;
					}
					String outputValue = key.toString() + FIELD_SEPARATOR
							+ tfiaf;
					context.write(new Text(authorName), new Text(outputValue));
				}
			} catch (IOException e) {
				e.printStackTrace();
			} catch (InterruptedException e) {
				// Restore the interrupt flag so the framework can observe it.
				Thread.currentThread().interrupt();
			}
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
		}
	}

	/**
	 * Second-stage mapper: a pass-through that re-keys each intermediate line
	 * on the author name. Each input line is "author&lt;TAB&gt;record"; the tab
	 * split yields the output key and value.
	 */
	private static class NormalizerMapper extends
			Mapper<Object, Text, Text, Text> implements Configurable {

		private Configuration conf;

		public void map(Object key, Text value, Context context) {
			String[] parts = value.toString().split("\t");
			Text outputKey = new Text(parts[0]);
			Text outputValue = new Text(parts[1]);
			try {
				context.write(outputKey, outputValue);
			} catch (IOException e) {
				e.printStackTrace();
			} catch (InterruptedException e) {
				e.printStackTrace();
			}
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
		}
	}

	/**
	 * Second-stage reducer. key: author; values: ngram\u0001score records.
	 * Keeps only the TOP_NGRAMS highest-scoring n-grams per author and emits
	 * (author\u0001ngram, score).
	 */
	private static class NormalizerReducer extends
			Reducer<Text, Text, Text, DoubleWritable> implements Configurable {

		// Must match TfIafGeneratorReducer's output-value separator.
		private static final String FIELD_SEPARATOR = "\u0001";
		// Number of top-scoring n-grams retained per author.
		private static final int TOP_NGRAMS = 500;

		private Configuration conf;

		/**
		 * (ngram, score) pair ordered by score. Ties are broken by the ngram
		 * text; without the tie-break, a TreeSet would treat distinct ngrams
		 * with equal scores as duplicates and silently drop them.
		 * Static: it needs no reference to the enclosing reducer instance.
		 */
		private static class NgramData implements Comparable<NgramData> {
			private final String ngram;
			private final double score;

			public NgramData(String ngram, double score) {
				this.ngram = ngram;
				this.score = score;
			}

			@Override
			public int compareTo(NgramData other) {
				int byScore = Double.compare(this.score, other.score);
				if (byScore != 0) {
					return byScore;
				}
				return this.ngram.compareTo(other.ngram);
			}

			public String getNgram() {
				return ngram;
			}

			public double getScore() {
				return score;
			}
		}

		public void reduce(Text key, Iterable<Text> values, Context context) {
			String authorName = key.toString();
			SortedSet<NgramData> ngramSet = new TreeSet<NgramData>();

			for (Text value : values) {
				String[] tokens = value.toString().split(FIELD_SEPARATOR);
				// Skip malformed records.
				if (tokens.length != 2) {
					continue;
				}
				ngramSet.add(new NgramData(tokens[0], Double
						.parseDouble(tokens[1])));
				// Cap the set size by evicting the lowest-scoring entry.
				if (ngramSet.size() > TOP_NGRAMS) {
					ngramSet.remove(ngramSet.first());
				}
			}

			try {
				for (NgramData data : ngramSet) {
					String outputKey = authorName + FIELD_SEPARATOR
							+ data.getNgram();
					context.write(new Text(outputKey), new DoubleWritable(data
							.getScore()));
				}
			} catch (IOException e) {
				e.printStackTrace();
			} catch (InterruptedException e) {
				// Restore the interrupt flag so the framework can observe it.
				Thread.currentThread().interrupt();
			}
		}

		@Override
		public Configuration getConf() {
			return conf;
		}

		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
		}
	}

	/**
	 * Driver: runs two chained jobs. Job 1 generates per-ngram TF-IAF records
	 * into a temporary directory; job 2 keeps the top-scoring n-grams per
	 * author. Args: &lt;input&gt; &lt;output&gt; &lt;num-authors&gt; [topic-map].
	 */
	public static void main(String[] args) throws Exception {
		String[] otherArgs = new GenericOptionsParser(args).getRemainingArgs();
		if (otherArgs.length < 3) {
			System.out
					.println("Usage: <program> <input> <output> <num-authors> [topic-map]");
			System.exit(-1);
		}
		{
			Configuration conf = new Configuration();
			conf.setInt("numAuthors", Integer.parseInt(otherArgs[2]));
			// Optional fourth argument enables topic-frequency weighting.
			if (otherArgs.length == 4) {
				conf.setBoolean("topicFreq", true);
				conf.set("topicMap", otherArgs[3]);
			}
			Job job = new Job(conf, "TfIafIntermediateGenerator");

			FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
			// Intermediate output consumed by the second job.
			FileOutputFormat.setOutputPath(job, new Path("tmp"));

			job.setJarByClass(TfIafGenerator.class);
			job.setMapperClass(TfIafGenerator.TfIafGeneratorMapper.class);
			job.setReducerClass(TfIafGenerator.TfIafGeneratorReducer.class);
			job.setNumReduceTasks(88);
			// One input line (author,file pair) per map task.
			NLineInputFormat.setNumLinesPerSplit(job, 1);
			job.setInputFormatClass(NLineInputFormat.class);

			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Text.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(Text.class);

			// Abort the pipeline if the first job fails; otherwise the second
			// job would run against a missing or partial "tmp" directory.
			if (!job.waitForCompletion(true)) {
				System.exit(1);
			}
		}
		{
			Configuration conf = new Configuration();
			conf.setInt("numAuthors", Integer.parseInt(otherArgs[2]));
			Job job = new Job(conf, "TfIafGenerator");

			FileInputFormat.addInputPath(job, new Path("tmp"));
			FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

			job.setJarByClass(TfIafGenerator.class);
			job.setMapperClass(TfIafGenerator.NormalizerMapper.class);
			job.setReducerClass(TfIafGenerator.NormalizerReducer.class);
			// One reducer per author keeps each author's output together.
			job.setNumReduceTasks(Integer.parseInt(otherArgs[2]));

			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(Text.class);
			job.setOutputKeyClass(Text.class);
			job.setOutputValueClass(DoubleWritable.class);

			// Propagate job success/failure as the process exit status.
			System.exit(job.waitForCompletion(true) ? 0 : 1);
		}
	}
}
