package com.thp.bigdata.myInputFormat;

import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

/**
 * A custom InputFormat used to merge many small files into large output
 * files: each input file is read whole and becomes exactly one record —
 * a {@link NullWritable} key and a {@link BytesWritable} value holding
 * the file's entire contents.
 */
public class WholeFileInputFormat extends FileInputFormat<NullWritable, BytesWritable> {

	
	/**
	 * Mark every file as non-splittable so that each small file produces
	 * exactly one key-value pair.
	 */
	@Override
	protected boolean isSplitable(JobContext context, Path filename) {
		// FileInputFormat's default is true; whole-file reading requires false.
		return false;
	}
	
	
	@Override
	public RecordReader<NullWritable, BytesWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
			throws IOException, InterruptedException {
		WholeFileReader reader = new WholeFileReader();
		reader.initialize(split, context);
		return reader;
	}
	
	/**
	 * RecordReader that emits a single record: the complete byte contents
	 * of the split's file wrapped in a {@link BytesWritable}.
	 */
	static class WholeFileReader extends RecordReader<NullWritable, BytesWritable> {

		private FileSplit fileSplit;
		private Configuration conf;
		private final BytesWritable value = new BytesWritable();
		
		// True once the single record for this file has been produced.
		private boolean processed = false;
		
		@Override
		public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
			this.fileSplit = (FileSplit) split;
			this.conf = context.getConfiguration();
		}

		
		
		/**
		 * Reads the whole file into {@code value} on the first call and
		 * returns true; every subsequent call returns false.
		 *
		 * @throws IOException if the file cannot be read, or is too large
		 *         (>= 2 GB) to fit in a single byte array
		 */
		@Override
		public boolean nextKeyValue() throws IOException, InterruptedException {
			if (processed) {
				return false;
			}
			long length = fileSplit.getLength();
			// BytesWritable is backed by a byte[], so a file >= 2 GB cannot be
			// represented; fail loudly instead of corrupting the size via the cast.
			if (length > Integer.MAX_VALUE) {
				throw new IOException("File too large for WholeFileInputFormat: " + fileSplit.getPath());
			}
			byte[] contents = new byte[(int) length];  // buffer for the whole file
			Path file = fileSplit.getPath();
			FileSystem fs = file.getFileSystem(conf);
			// try-with-resources guarantees the stream is closed even on error
			// (replaces the deprecated IOUtils.closeQuietly).
			try (FSDataInputStream in = fs.open(file)) {
				IOUtils.readFully(in, contents, 0, contents.length);
				value.set(contents, 0, contents.length);
			}
			processed = true;
			return true;
		}

		@Override
		public NullWritable getCurrentKey() throws IOException, InterruptedException {
			return NullWritable.get();
		}

		@Override
		public BytesWritable getCurrentValue() throws IOException, InterruptedException {
			return value;
		}

		@Override
		public float getProgress() throws IOException, InterruptedException {
			// Progress must lie in [0.0, 1.0]; the previous 2.0f was invalid.
			return processed ? 1.0f : 0.0f;
		}

		@Override
		public void close() throws IOException {
			// Nothing to close: the input stream is closed in nextKeyValue().
		}
		
	}
	
	

}
