package cn.edu.xmu.datamining.tangzk.mralgos.pagerank;

import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.Reducer;

import cn.edu.xmu.datamining.tangzk.util.Joiner;

/**
 * PageRank Reducer
 * input: <id, List<(original pagerank, adjnodes)|(unnormalized pagerank)>>
 * 
 * output:
 * <id, (new unnormalized pagerank, adjnodes)>
 * 
 * Relies on the secondary sort to place the node's structure record (original
 * pagerank + adjacency list) before the pagerank contributions arriving from
 * other nodes after the shuffle & sort.
 * terminating condition: \sum{diff(origPageRank, newPageRank)} < \xi
 * 
 * @version 2013-8-4
 * @author tangzk
 * @Reviewer
 * 
 */
public class PageRankReducer
		extends
		Reducer<PageRankNodeKeyWritable, PageRankNodeValueWritable, IntWritable, Text> {

	private long N = 0;
	private long sumPagerankL = 0;
	private float sumPagerankF = 0.0f;
	private float beta = 1.0f;

	protected void setup(
			org.apache.hadoop.mapreduce.Reducer<PageRankNodeKeyWritable, PageRankNodeValueWritable, IntWritable, Text>.Context context)
			throws java.io.IOException, InterruptedException {
		Configuration conf = context.getConfiguration();
		JobClient client = new JobClient(new JobConf(conf));
		RunningJob parentJob = client.getJob(JobID.forName(conf
				.get("mapred.job.id")));

		N = parentJob.getCounters().getGroup(PageRankDriver.USER_DEFINED_GROUP)
				.getCounter(PageRankDriver.TOTAL_NODES);
		sumPagerankL = parentJob.getCounters()
				.getGroup(PageRankDriver.USER_DEFINED_GROUP)
				.getCounter(PageRankDriver.SUM_PAGERANK_COUNTER);
		sumPagerankF = sumPagerankL / (PageRankDriver.PAGERANK_BASE * 1.0f);

		if (N == 0 || sumPagerankL == 0L
				|| Float.compare(sumPagerankF, 0.0f) == 0) {
			System.out.println("N:" + N + ", sumPagerankL=" + sumPagerankL
					+ ", sumPagerankF=" + sumPagerankF);
			throw new java.io.IOException("can't get the counter value.");
		}

		beta = conf.getFloat(PageRankDriver.BETA_NAME, 1.0f);
		System.out.println(beta);
	};

	protected void reduce(
			PageRankNodeKeyWritable key,
			java.lang.Iterable<PageRankNodeValueWritable> values,
			org.apache.hadoop.mapreduce.Reducer<PageRankNodeKeyWritable, PageRankNodeValueWritable, IntWritable, Text>.Context context)
			throws java.io.IOException, InterruptedException {

		Iterator<PageRankNodeValueWritable> valIter = values.iterator();
		PageRankNodeValueWritable struct = null;
		if (valIter.hasNext()) {
			// get structure info in the first position
			struct = valIter.next();
		} else {
			return;
		}
		float pagerank = 0.0f;
		while (valIter.hasNext()) {
			PageRankNodeValueWritable node = valIter.next();
			// normalized and average
			pagerank += (node.getPagerank() / sumPagerankF / node.getLen());
		}
		pagerank = beta * pagerank + (1 - beta) / (N * 1.0f);
		context.getCounter(PageRankDriver.USER_DEFINED_GROUP,
				PageRankDriver.SUM_PAGERANK_DIFF)
				.increment(
						(long) (Math.abs(pagerank - struct.getPagerank()) * PageRankDriver.PAGERANK_BASE));

		StringBuilder sb = new StringBuilder();
		sb.append(pagerank).append("\t");
		if (struct != null) {
			sb.append(Joiner.on(struct.getAdjNodes().iterator(), ","));
		}

		context.write(new IntWritable(key.getId()), new Text(sb.toString()));
	};

}
