package sc.summary;


import java.io.IOException;
import java.util.LinkedList;
import java.util.List;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import sc.writable.IntermediateData;
import sc.writable.StockSummary;


/**
 * Reducer of the Summary. Receives the data from all the analyses for a
 * single stock.
 * 
 * TODO give more weight to some indicators, among other refinements.
 *
 * @author Quentin Rossé
 *
 */
public class SummaryReducer extends Reducer<Text, IntermediateData, 
                                        StockSummary, IntermediateData> {
	
	/**
	 * Reusable output key. An instance field rather than {@code static}:
	 * a static mutable field would be shared by every reducer instance in
	 * the JVM and could be clobbered under concurrent execution. Reusing a
	 * single object per reducer is the standard Hadoop pattern to limit
	 * allocation.
	 */
	private final StockSummary ss = new StockSummary();
	
	/**
	 * Aggregates the score of every analysis type for a single stock, then
	 * re-emits each buffered intermediate record keyed by the resulting
	 * summary.
	 *
	 * @param key     the stock identifier
	 * @param values  intermediate results from all analyses for this stock
	 * @param context Hadoop context used to emit (summary, data) pairs
	 * @throws IOException          if writing an output record fails
	 * @throws InterruptedException if the task is interrupted while writing
	 */
	@Override
	public void reduce(Text key, Iterable<IntermediateData> values, Context context) throws IOException, InterruptedException {
		// One accumulator slot per analysis type; Java zero-initializes
		// int arrays, so no explicit fill loop is needed.
		int[] score = new int[IntermediateData.Type.values().length];
		
		// Hadoop reuses the object handed out by the values iterator, so
		// each record must be defensively copied before being buffered for
		// re-emission after the totals are known.
		List<IntermediateData> data = new LinkedList<IntermediateData>();
		for (IntermediateData imd : values) {
			score[imd.getType().ordinal()] += imd.getScore();
			data.add(new IntermediateData(imd));
		}
		
		ss.setStock(key.toString());
		ss.setScore(score);
		for (IntermediateData imd : data) {
			context.write(ss, imd);
		}
	}
	
	/** No per-task initialization required. */
	@Override
	public void setup(Context context){}
	
	/** No per-task teardown required. */
	@Override
	public void cleanup(Context context){}
	
}