package regression.model;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextOutputFormat;

/**
 * Initialization of Poisson
 * @author tigerzhong
 *
 */
public class PoissonInitialize {
    /**
     * Reducer for Initialization
     * @author tigerzhong
     *
     */
    /**
     * Reducer for Initialization.
     *
     * <p>Key is a feature index (as text). Each mapper value is a CSV line of
     * {@code numClasses} partial y*x terms followed by one x term. The reducer
     * sums those terms per class, divides each class sum by the x sum, and
     * emits the resulting ratios tab-separated (with a trailing tab).
     * @author tigerzhong
     *
     */
    public static class Reduce extends MapReduceBase implements
	    Reducer<Text, Text, Text, Text> {
	/**
	 * Number of features; sizes the accumulator arrays
	 */
	private int numFeatures;
	/**
	 * Number of classes
	 */
	private int numClasses;
	/**
	 * Per-feature, per-class accumulated sums of y*x fractions
	 */
	private double[][] yx = null;
	/**
	 * Per-feature accumulated sums of feature values
	 */
	private double[] x = null;
	/**
	 * Reused holder for the output value
	 */
	private Text tVal = new Text();

	@Override
	public void configure(JobConf job) {
	    numClasses = job.getInt("Data.NumClasses", 1);
	    numFeatures = job.getInt("Data.NumFeatures", 8);

	    yx = new double[numFeatures][numClasses];
	    x = new double[numFeatures];
	}

	@Override
	public void reduce(Text inKey, Iterator<Text> inValues,
		OutputCollector<Text, Text> output, Reporter report)
		throws IOException {
	    int key = Integer.parseInt(inKey.toString());
	    while (inValues.hasNext()) {
		// Mapper format: numClasses y*x terms, then the x term.
		String items[] = inValues.next().toString().split(",", -1);
		for (int j = 0; j < numClasses; j++) {
		    yx[key][j] += Double.parseDouble(items[j]);
		}
		x[key] += Double.parseDouble(items[numClasses]);
	    }
	    // NOTE(review): if x[key] sums to 0 this divides by zero and
	    // emits NaN/Infinity — confirm upstream data guarantees x > 0.
	    for (int i = 0; i < numClasses; i++) {
		yx[key][i] = yx[key][i] / x[key];
	    }
	    // StringBuilder instead of repeated String concatenation in a loop.
	    StringBuilder outValue = new StringBuilder();
	    for (int j = 0; j < numClasses; j++) {
		outValue.append(yx[key][j]).append('\t');
	    }
	    tVal.set(outValue.toString());

	    output.collect(inKey, tVal);
	}

    }

    /**
     * Mapper for Initialization
     * @author tigerzhong
     *
     */
    /**
     * Mapper for Initialization.
     *
     * <p>Each input line is CSV: {@code numSkip} leading columns to ignore,
     * then {@code numFeatures} feature values, then {@code numClasses} target
     * values. For every feature index i it emits key = i and value =
     * "y0*x_i/sumX,...,y_{c-1}*x_i/sumX,x_i".
     * @author tigerzhong
     *
     */
    public static class Map extends MapReduceBase implements
	    Mapper<LongWritable, Text, Text, Text> {
	/**
	 * Number of features
	 */
	private int numFeatures;
	/**
	 * Number of classes
	 */
	private int numClasses;
	/**
	 * Number of leading columns to skip
	 */
	private int numSkip;
	/**
	 * Reused output key (the feature index)
	 */
	private Text key = new Text();
	/**
	 * Reused output value
	 */
	private Text value = new Text();

	@Override
	public void map(LongWritable inKey, Text inValue,
		OutputCollector<Text, Text> output, Reporter reporter)
		throws IOException {
	    /*Split the CSV line into skipped columns, features, targets*/
	    String items[] = inValue.toString().split(",", -1);
	    double instance[] = new double[numFeatures];
	    double target[] = new double[numClasses];
	    for (int i = 0; i < numFeatures; i++)
		instance[i] = Double.parseDouble(items[i + numSkip]);
	    for (int i = 0; i < numClasses; i++)
		target[i] = Double.parseDouble(items[i + numSkip + numFeatures]);
	    // Sum of all feature values; normalizes each per-class product.
	    double xvalue = 0.0;
	    for (int i = 0; i < numFeatures; i++) {
		xvalue += instance[i];
	    }
	    // NOTE(review): xvalue == 0 yields NaN terms below — confirm
	    // the training data guarantees a non-zero feature sum per row.
	    for (int i = 0; i < numFeatures; i++) {
		// Build "yx_0,...,yx_{c-1},x_i" with StringBuilder rather than
		// String += in a loop; the old per-iteration yx[] array was
		// write-once/read-once, so compute each term inline.
		StringBuilder outValue = new StringBuilder();
		for (int j = 0; j < numClasses; j++) {
		    outValue.append(target[j] * instance[i] / xvalue).append(',');
		}
		outValue.append(instance[i]);
		key.set(Integer.toString(i));
		value.set(outValue.toString());
		output.collect(key, value);
	    }
	}

	@Override
	public void configure(JobConf job) {
	    numClasses = job.getInt("Data.NumClasses", 1);
	    numFeatures = job.getInt("Data.NumFeatures", 8);
	    numSkip = job.getInt("Data.NumSkip", 3);
	}
    }
    
    /**
     * Controller
     * @param confPath
     * @throws Exception
     */
    /**
     * Configures and launches the PoissonInitialize MapReduce job.
     *
     * <p>Reads all job settings (jar, task counts, input/output paths) from
     * the Hadoop configuration file at {@code confPath}, then blocks until
     * the job completes.
     * @param confPath path to the Hadoop job configuration file
     * @throws Exception if job submission or execution fails
     */
    public static void run(String confPath) throws Exception {
	JobConf conf = new JobConf(confPath);
	conf.setJobName("PoissonInitialize");
	conf.setJar(conf.get("Hadoop.Jar"));
	// Mapper and Reducer both emit Text/Text pairs.
	conf.setMapperClass(Map.class);
	conf.setReducerClass(Reduce.class);
	conf.setOutputKeyClass(Text.class);
	conf.setOutputValueClass(Text.class);
	conf.setOutputFormat(TextOutputFormat.class);
	conf.setNumMapTasks(conf.getInt("Hadoop.NumOfMap", 100));
	conf.setNumReduceTasks(conf.getInt("Hadoop.NumOfReduce", 0));
	FileInputFormat.setInputPaths(conf, new Path(conf.get("Data.TrainInputPath")));
	// Results of this initialization round go under the "0/" subdirectory.
	FileOutputFormat.setOutputPath(conf, new Path(conf.get("Data.OutputPath")+ "0/"));
	JobClient.runJob(conf);
    }
}
