package ru.mipt.victator.pagerank.rank;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import ru.mipt.victator.pagerank.Value;

/**
 * Map phase of one PageRank iteration (old {@code mapred} API).
 *
 * <p>In {@link #configure(JobConf)} it loads the previous iteration's ranks
 * from a SequenceFile (via {@code SequenceFile2Map.Read}) into an in-memory
 * map keyed by source-page name. For each input record, {@link #map} looks up
 * the source page's current rank (falling back to the configured start rank
 * on the first iteration, when no entry exists yet) and emits
 * {@code d * pr / n} — the damped rank share contributed to the record's key,
 * where {@code n} is the source page's out-link count.
 *
 * <p>Required job properties:
 * <ul>
 *   <li>{@code RankJob.CalculationsParameter.Float} — damping factor d</li>
 *   <li>{@code InputData.StartPageRank.Float} — initial rank for unseen pages</li>
 * </ul>
 */
public class RankMap extends MapReduceBase implements Mapper
{
	private FileSystem fs = null;
	private JobConf conf;
	// Damping factor d of the PageRank formula.
	private float d;
	// Rank assigned to pages not present in the previous iteration's output.
	private float startPr;
	// Location of the previous iteration's rank SequenceFile(s).
	private Path prPath;
	// Page name -> rank from the previous iteration, loaded once per task.
	private Map<String,FloatWritable> rankMap;

	@Override
	public void configure(JobConf job)
	{
		conf = job;
		try
		{
			fs = FileSystem.get(conf);
		}
		catch (IOException e)
		{
			throw new RuntimeException("Cannot obtain FileSystem for job", e);
		}

		d = parseRequiredFloat("RankJob.CalculationsParameter.Float");
		startPr = parseRequiredFloat("InputData.StartPageRank.Float");
		prPath = conf.getWorkingDirectory();
		try
		{
			rankMap = SequenceFile2Map.Read(fs, job, prPath);
			System.out.println("map.size() = " + rankMap.size());
		}
		catch (IOException e)
		{
			throw new RuntimeException("Cannot read previous PageRank values from " + prPath, e);
		}
	}

	/**
	 * Reads the named job property and parses it as a float.
	 *
	 * @throws RuntimeException with the property name if it is missing,
	 *         instead of the bare NPE that {@code Float.parseFloat(null)}
	 *         would otherwise produce.
	 */
	private float parseRequiredFloat(String property)
	{
		String value = conf.get(property);
		if (value == null)
		{
			throw new RuntimeException("Missing required job property: " + property);
		}
		return Float.parseFloat(value);
	}

	@Override
	public void map(WritableComparable key, Writable value,
			OutputCollector output, Reporter reporter) throws IOException
	{
		Text text = ((Value)value).source;
		FloatWritable fw = rankMap.get(text.toString());
		// Unseen source page: first iteration, use the configured start rank.
		float pr = (fw == null) ? startPr : fw.get();
		int n = ((Value)value).N.get();
		// A dangling page (no out-links) must not distribute rank:
		// float division by zero would silently emit Infinity/NaN and
		// corrupt every downstream rank sum.
		if (n <= 0)
		{
			return;
		}
		output.collect(key, new FloatWritable( d*pr/n ));
	}

}
