package docsim;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.StringUtils;

/**
 * Third-stage reducer: for each document pair, sums tf * idf over the words
 * the pair shares and emits one JSON record per pair:
 * {"d1":&lt;doc1&gt;,"d2":&lt;doc2&gt;,"score":&lt;sum&gt;}.
 *
 * The word -&gt; IDF table is loaded in {@link #configure(JobConf)} from the
 * "idf.txt" file that the driver placed in the DistributedCache.
 */
public class Reduce3 extends MapReduceBase
		implements Reducer<Text, Text, Text, Text> {

	private static final int ONE = 1;
	private static final int ZERO = 0;
	private static final String closing_bracket = "}";
	private static final String score_const = ",\"score\":";
	private static final String d2_const = ",\"d2\":";
	private static final String d1_const = "{\"d1\":";
	private static final String colon = ":";
	private static final String idf_name = "idf.txt";
	private static final String tab = "\t";

	// word -> inverse document frequency; populated once per task in configure().
	// Instance field (not static): the map belongs to this reducer instance, and
	// static mutable state is unsafe if the framework reuses the JVM.
	private final HashMap<String, Double> idfMap = new HashMap<String, Double>();

	/**
	 * Loads the IDF lookup table from the DistributedCache. Each line of the
	 * cache file is "word\tidf".
	 *
	 * @param job the task configuration, used to locate the local cache files
	 */
	@Override
	public void configure(JobConf job) {
		try {
			Path[] cacheFiles = DistributedCache.getLocalCacheFiles(job);
			if (cacheFiles == null) {
				// Nothing in the cache; leave idfMap empty rather than NPE here.
				return;
			}
			for (Path cacheFile : cacheFiles) {
				// equals(), not matches(): idf_name is a literal file name, and as a
				// regex its "." would match any character (e.g. "idfXtxt").
				if (idf_name.equals(cacheFile.getName())) {
					// Close the reader on every path; the original leaked it.
					BufferedReader fis = new BufferedReader(new FileReader(cacheFile.toString()));
					try {
						String line;
						while ((line = fis.readLine()) != null) {
							String[] split = line.split(tab);
							String word = split[ZERO];
							Double idf = Double.parseDouble(split[ONE]);
							idfMap.put(word, idf);
						}
					} finally {
						fis.close();
					}
					break;
				}
			}
		} catch (IOException e) {
			// Best-effort behavior preserved from the original: log and continue
			// with whatever was loaded; reduce() treats missing words as idf 0.
			e.printStackTrace();
		}
	}

	/**
	 * Sums tf * idf over all "word:tf" values for one document pair and emits a
	 * single JSON record with a null key (value-only output).
	 *
	 * @param key       "doc1:doc2" pair identifier
	 * @param values    iterator of "word:tf" strings for this pair
	 * @param collector sink for the (null, JSON) output record
	 * @param reporter  unused
	 * @throws IOException if the collector fails
	 */
	@Override
	public void reduce(Text key, Iterator<Text> values,
			OutputCollector<Text, Text> collector, Reporter reporter)
			throws IOException {
		// Local accumulator: the original used a static field, which would corrupt
		// results if reduce() were ever invoked concurrently in one JVM.
		double sum = 0;
		while (values.hasNext()) {
			String wordcount = values.next().toString();
			String[] split = wordcount.split(colon);
			String word = split[ZERO];
			int tf = Integer.parseInt(split[ONE]);
			// Guard the lookup: a word absent from the IDF table would otherwise
			// auto-unbox null and throw NullPointerException. Treat it as idf 0.
			Double idf = idfMap.get(word); // replace by idf = DHT.get(wc.word);
			if (idf != null) {
				sum += idf.doubleValue() * tf;
			}
		}
		String[] docs = key.toString().split(colon);

		Text admRecord = new Text(
				d1_const + docs[0] + d2_const + docs[1] + score_const + sum + closing_bracket);
		collector.collect(null, admRecord);
	}
}