package parallel;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * A {@link RecordReader} that emits the sequence indices {@code 0..sequenceCount-1}
 * of a {@link BasicRangeInputSplit} as keys, with {@link NullWritable} values.
 *
 * <p>Not thread-safe; Hadoop drives a record reader from a single task thread.
 */
public class BasicRangeRecordReader extends RecordReader<LongWritable, NullWritable> {
	// Index most recently produced by nextKeyValue(); 0 until the first call.
	long currentSequence;
	// Number of sequences already emitted (also the next index to emit).
	long finishedSequences;
	// Total number of sequences this split covers.
	long totalSequences;

	/**
	 * Creates a reader over the given split.
	 *
	 * @param split source of the sequence count; initialize() re-reads it from
	 *              the split the framework passes in, so both paths agree
	 */
	public BasicRangeRecordReader(BasicRangeInputSplit split) {
		currentSequence = 0;
		finishedSequences = 0;
		totalSequences = split.sequenceCount;
	}

	/** No resources are held, so there is nothing to release. */
	@Override
	public void close() throws IOException {
		// NOTHING
	}

	/**
	 * @return fraction of sequences emitted so far, in [0, 1]; 0 for an empty
	 *         split (guards the 0/0 division that would otherwise yield NaN)
	 */
	@Override
	public float getProgress() throws IOException {
		if (totalSequences == 0) {
			return 0.0f;
		}
		return finishedSequences / (float) totalSequences;
	}

	/** @return the sequence index produced by the last successful nextKeyValue() */
	@Override
	public LongWritable getCurrentKey() throws IOException,
			InterruptedException {
		return new LongWritable(currentSequence);
	}

	/** @return the shared NullWritable singleton; this reader carries no values */
	@Override
	public NullWritable getCurrentValue() throws IOException,
			InterruptedException {
		return NullWritable.get();
	}

	/**
	 * Re-reads the sequence count from the framework-supplied split.
	 *
	 * @param split must be a {@link BasicRangeInputSplit}
	 * @param arg1  task context (unused)
	 */
	@Override
	public void initialize(InputSplit split, TaskAttemptContext arg1)
			throws IOException, InterruptedException {
		totalSequences = ((BasicRangeInputSplit) split).getSequenceCount();
	}

	/**
	 * Advances to the next sequence index.
	 *
	 * @return true if a new key is available via getCurrentKey(), false when
	 *         all {@code totalSequences} indices have been emitted
	 */
	@Override
	public boolean nextKeyValue() throws IOException,
			InterruptedException {
		if (finishedSequences < totalSequences) {
			currentSequence = finishedSequences;
			finishedSequences += 1;
			return true;
		} else {
			return false;
		}
	}

}
