package jobs;

import java.io.IOException;

import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;

import domain.Tweet;
import domain.User;

/**
 * A {@link RecordReader} that is intended to present each input line as a
 * ({@link User}, {@link Tweet}) pair, delegating line splitting to a
 * {@link KeyValueLineRecordReader}.
 *
 * <p>NOTE(review): as written, the key/value fields are never populated (see
 * {@link #nextKeyValue()}), so consumers currently receive {@code null} for
 * both. The deserialization step must be restored once the Tweet/User parsing
 * API is settled.
 */
public class TweetRecordReader extends RecordReader<User, Tweet> {
	/** Key of the most recently read record; null until nextKeyValue() assigns it. */
	private User key;
	/** Value of the most recently read record; null until nextKeyValue() assigns it. */
	private Tweet value;
	/** Underlying reader that splits each input line into key/value Text pairs. */
	private KeyValueLineRecordReader lineReader;

	/**
	 * Creates the reader. The split itself is bound later via
	 * {@link #initialize(InputSplit, TaskAttemptContext)}; the {@code split}
	 * parameter is unused here and retained only for the construction
	 * convention used by the corresponding InputFormat.
	 *
	 * @param split   the input split this reader will consume (bound in initialize)
	 * @param context task context supplying the job {@link org.apache.hadoop.conf.Configuration}
	 * @throws IOException if the underlying line reader cannot be created
	 */
	public TweetRecordReader(InputSplit split, TaskAttemptContext context) throws IOException {
		lineReader = new KeyValueLineRecordReader(context.getConfiguration());
	}

	/** Closes the underlying line reader and releases its resources. */
	@Override
	public void close() throws IOException {
		lineReader.close();
	}

	/**
	 * @return the key of the current record
	 *         (NOTE(review): currently always {@code null} — never assigned; see nextKeyValue())
	 */
	@Override
	public User getCurrentKey() throws IOException, InterruptedException {
		return this.key;
	}

	/**
	 * @return the value of the current record
	 *         (NOTE(review): currently always {@code null} — never assigned; see nextKeyValue())
	 */
	@Override
	public Tweet getCurrentValue() throws IOException, InterruptedException {
		return this.value;
	}

	/** @return progress through the split, as reported by the delegate reader */
	@Override
	public float getProgress() throws IOException, InterruptedException {
		return lineReader.getProgress();
	}

	/** Binds the delegate reader to the given split and task context. */
	@Override
	public void initialize(InputSplit split, TaskAttemptContext context)
			throws IOException, InterruptedException {
		lineReader.initialize(split, context);
	}

	/**
	 * Advances to the next input line.
	 *
	 * <p>BUG(review): this advances the underlying reader but never populates
	 * {@code this.key} / {@code this.value}, so getCurrentKey()/getCurrentValue()
	 * always return {@code null}. The intended deserialization — parsing the
	 * line held by {@code lineReader} into a {@link Tweet} and deriving its
	 * owning {@link User} — must be performed here once the Tweet/User parsing
	 * API is confirmed, e.g.:
	 * <pre>
	 *   this.value = ...parse lineReader.getCurrentKey()/getCurrentValue()...;
	 *   this.key   = this.value's user;
	 * </pre>
	 *
	 * @return {@code true} if a record was read, {@code false} at end of split
	 */
	@Override
	public boolean nextKeyValue() throws IOException, InterruptedException {
		return lineReader.nextKeyValue();
	}
}
