package jobs; 
 
import java.io.IOException; 
 
import org.apache.hadoop.io.Text; 
import org.apache.hadoop.mapreduce.InputSplit; 
import org.apache.hadoop.mapreduce.RecordReader; 
import org.apache.hadoop.mapreduce.TaskAttemptContext; 
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader; 
 
 
/**
 * A {@link RecordReader} that produces {@code Text} key/value pairs by delegating
 * every operation to a {@link KeyValueLineRecordReader}, which splits each input
 * line into a key and a value at the configured separator.
 */
public class JoinRecordReader extends RecordReader<Text, Text> {

    /** Delegate that performs the actual line reading and key/value splitting. */
    private final KeyValueLineRecordReader keyValueLineReader;

    /**
     * Creates the underlying {@link KeyValueLineRecordReader} from the task's
     * configuration.
     *
     * @param context task context supplying the job {@link org.apache.hadoop.conf.Configuration}
     * @throws IllegalStateException if the delegate reader cannot be created
     */
    public JoinRecordReader(TaskAttemptContext context) {
        try {
            keyValueLineReader = new KeyValueLineRecordReader(context.getConfiguration());
        } catch (IOException e) {
            // Fail fast with the original cause attached. The previous code only
            // printed the stack trace and left the delegate null, which produced
            // a confusing NullPointerException on the first later method call.
            throw new IllegalStateException("Failed to create KeyValueLineRecordReader", e);
        }
    }

    @Override
    public void close() throws IOException {
        keyValueLineReader.close();
    }

    @Override
    public Text getCurrentKey() throws IOException, InterruptedException {
        return keyValueLineReader.getCurrentKey();
    }

    @Override
    public Text getCurrentValue() throws IOException, InterruptedException {
        return keyValueLineReader.getCurrentValue();
    }

    @Override
    public float getProgress() throws IOException, InterruptedException {
        return keyValueLineReader.getProgress();
    }

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context)
            throws IOException, InterruptedException {
        keyValueLineReader.initialize(split, context);
    }

    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        return keyValueLineReader.nextKeyValue();
    }
}