package org.shj.hadoop.customerinputformat;

import java.io.IOException;

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * RecordReader that hands the whole {@link FindMaxValueInputSplit} to the mapper
 * as a single record: the key is the split's start index and the value is the
 * split's entire float array (obtained via {@code getM_FloatArray()}).
 *
 * <p>Not thread-safe; Hadoop uses one instance per task attempt.
 */
public class FindMaxValueRecordReader extends RecordReader<IntWritable, ArrayWritable> {

	/** Last index covered by this split (inclusive). */
	private int end;
	/** Cursor; advanced past {@link #end} once the single record is emitted. */
	private int index;
	/** First index covered by this split. */
	private int start;
	private IntWritable key;
	private ArrayWritable value;
	private FindMaxValueInputSplit fmvSplit;

	/** Nothing to release: the split's data lives in memory, no streams are held. */
	@Override
	public void close() throws IOException {
		// no-op
	}

	@Override
	public IntWritable getCurrentKey() throws IOException, InterruptedException {
		return key;
	}

	@Override
	public ArrayWritable getCurrentValue() throws IOException, InterruptedException {
		return value;
	}

	/**
	 * Reports 0.0 before the record is consumed and 1.0 after.
	 *
	 * <p>Guards on {@code end == start} explicitly: the original expression
	 * {@code (index - start) / (end - start)} would divide by zero for a
	 * single-index split.
	 */
	@Override
	public float getProgress() throws IOException, InterruptedException {
		if (end == start) {
			// Single-index split: done once the cursor has moved past end.
			return index > end ? 1.0f : 0.0f;
		}
		return Math.min(1.0f, (index - start) / (float) (end - start));
	}

	/**
	 * Caches the split and positions the cursor at its start index.
	 *
	 * @param split must be a {@link FindMaxValueInputSplit}; a
	 *              {@link ClassCastException} is raised otherwise
	 */
	@Override
	public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
		this.fmvSplit = (FindMaxValueInputSplit) split;
		this.start = fmvSplit.getM_StartIndex();
		this.end = fmvSplit.getM_EndIndex();
		this.index = this.start;
	}

	/**
	 * Emits exactly one key/value pair per split, then reports exhaustion.
	 *
	 * <p>The value is taken directly from the split; the previous version
	 * pre-allocated an empty {@code ArrayWritable} that was immediately
	 * overwritten, so that dead allocation has been removed.
	 *
	 * @return {@code true} the first time it is called (when the split is
	 *         non-empty), {@code false} on every subsequent call
	 */
	@Override
	public boolean nextKeyValue() throws IOException, InterruptedException {
		if (key == null) {
			key = new IntWritable();
		}
		if (index <= end) {
			key.set(index);
			value = fmvSplit.getM_FloatArray();
			// Jump past the end so the next call terminates the iteration.
			index = end + 1;
			return true;
		}
		return false;
	}
}
