import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;

  /**
   * Old-API ({@code org.apache.hadoop.mapred}) {@link RecordReader} that delivers an
   * entire {@link FileSplit} as a single record: the key is the split's start offset
   * (truncated to int) and the value is the raw bytes of the split plus a small
   * overlap read past its end.
   *
   * <p>The backing byte array is shared between {@link #createValue()} and
   * {@link #next(IntWritable, BytesWritable)}: {@code BytesWritable(byte[])} wraps the
   * array, and {@code next} fills it in place, so the value handed to the framework
   * sees the data once {@code next} returns {@code true}.
   *
   * <p>Not thread-safe; intended for single-task use by the MapReduce framework.
   */
  public class BytesReader implements RecordReader<IntWritable, BytesWritable> {

      // Extra bytes read past the end of a split, presumably so records straddling a
      // split boundary are fully captured. NOTE(review): the value 99 looks like a
      // job-specific record-size bound — confirm against the input format.
      private static final int OVERLAP_BYTES = 99;

      // Start offset of the split that has no following data to overlap into.
      // NOTE(review): appears to be the last split's start offset for one specific
      // input file (960 MB = 1006632960) — verify; this will break on other inputs.
      private static final long NO_OVERLAP_SPLIT_START = 1006632960L;

      /** Buffer holding the split contents (+ overlap); shared with createValue(). */
      private final byte[] values;
      /** The split this reader serves. */
      private final FileSplit file;
      /** Job configuration, used to resolve the FileSystem. */
      private final Configuration job;
      /** True once the single record has been produced (primitive, not boxed Boolean). */
      private boolean read = false;

      /**
       * Creates a reader over the given split.
       *
       * @param split the input split; must be a {@link FileSplit}
       * @param conf  the job configuration
       * @throws IOException declared for interface symmetry with other readers
       */
      public BytesReader(InputSplit split, JobConf conf) throws IOException {
          file = (FileSplit) split;
          job = conf;
          // Allocate room for the split plus the overlap region.
          values = new byte[(int) split.getLength() + OVERLAP_BYTES];
      }

      /**
       * Produces the single record for this split on the first call and returns
       * {@code true}; every later call returns {@code false}.
       *
       * @param key   populated with the split's start offset (truncated to int)
       * @param value the writable created by {@link #createValue()}; it wraps the
       *              shared buffer, so it observes the bytes read here
       * @throws IOException if the file cannot be opened or fully read
       */
      @Override
      public boolean next(IntWritable key, BytesWritable value) throws IOException {
          if (read) {
              return false;
          }
          Path path = file.getPath();
          FileSystem fs = path.getFileSystem(job);
          FSDataInputStream reader = fs.open(path);
          try {
              reader.seek(file.getStart());
              // The designated split has no trailing data, so skip the overlap bytes.
              int toRead = (file.getStart() == NO_OVERLAP_SPLIT_START)
                      ? values.length - OVERLAP_BYTES
                      : values.length;
              IOUtils.readFully(reader, values, 0, toRead);
          } finally {
              // Close even when readFully throws (the original leaked the stream here).
              IOUtils.closeStream(reader);
          }
          // Honor the old-API contract of populating the caller-supplied key.
          key.set((int) file.getStart());
          read = true;
          return true;
      }

      /** @return a key pre-set to this split's start offset (truncated to int). */
      @Override
      public IntWritable createKey() {
          return new IntWritable((int) file.getStart());
      }

      /** @return a value wrapping the shared buffer; filled in by {@link #next}. */
      @Override
      public BytesWritable createValue() {
          return new BytesWritable(values);
      }

      /** @return bytes consumed: the buffer length once the record is read, else 0. */
      @Override
      public long getPos() throws IOException {
          return read ? values.length : 0L;
      }

      /** Nothing to close here: the input stream is closed inside {@link #next}. */
      @Override
      public void close() throws IOException {
          // no-op
      }

      /** @return 1.0 after the single record has been produced, else 0.0. */
      @Override
      public float getProgress() throws IOException {
          return read ? 1.0f : 0.0f;
      }
  }