package cn.hyxy.hadoop;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Demo14_SaveSequenceFileMR extends Configured implements Tool {
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 2) {
			System.out.println("usage : in out...");
			return -1;
		}
		// 7:声明Job
		Configuration config = getConf();
		FileSystem fs = FileSystem.get(config);
		Path path = new Path(args[1]);
		if (fs.exists(path)) {
			fs.delete(path, true);
		}
		// 8:Job
		Job job = Job.getInstance(config, "小文件");
		job.setJarByClass(getClass());
		//
		job.setMapperClass(Mapper.class);// 默认的！！！
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(BytesWritable.class);
		// 最后还需要shuffle,将3个文件合并成一个文件
		// job.setNumReduceTasks(0);
		//
		job.setInputFormatClass(WholeFileInputFormat.class);
		WholeFileInputFormat.addInputPath(job, new Path(args[0]));
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		SequenceFileOutputFormat.setOutputPath(job, path);

		return job.waitForCompletion(true) ? 0 : 1;
	}

	public static void main(String[] args) throws Exception {
		// Delegate to ToolRunner so generic options (-D, -conf, -fs, ...) are
		// parsed before run() is invoked; exit with the job's status code.
		System.exit(ToolRunner.run(new Demo14_SaveSequenceFileMR(), args));
	}

	// Custom InputFormat that reads each input file in its entirety as a
	// single (fileName, fileBytes) record — the classic "pack many small
	// files into one SequenceFile" pattern.
	public static class WholeFileInputFormat extends FileInputFormat<Text, BytesWritable> {

		@Override
		protected boolean isSplitable(JobContext context, Path filename) {
			// A whole-file format must never split a file: returning false
			// guarantees exactly one split per file even when a file is
			// larger than the HDFS block size. (Returning true here would
			// let large files be split, producing corrupt/partial records.)
			return false;
		}

		@Override
		public RecordReader<Text, BytesWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
				throws IOException, InterruptedException {
			// The framework calls initialize() as well; doing it here too is harmless.
			WholeFlieReader reader = new WholeFlieReader();
			reader.initialize(split, context);
			return reader;
		}
	}

	// RecordReader that emits exactly one record per split: the bare file
	// name as the key and the complete file contents as the value.
	public static class WholeFlieReader extends RecordReader<Text, BytesWritable> {
		private Text fileName = new Text();
		private BytesWritable value; // populated in initialize()
		private boolean next = true; // true until the single record is consumed

		@Override
		public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
			FileSplit fileSplit = (FileSplit) split;
			Path path = fileSplit.getPath();
			fileName.set(path.getName()); // key = file name without directories
			// Resolve the FileSystem from the path so non-default filesystems work.
			FileSystem fs = path.getFileSystem(context.getConfiguration());
			ByteArrayOutputStream bys = new ByteArrayOutputStream();
			// try-with-resources guarantees the stream is closed even if read()
			// throws (the original leaked the stream on an I/O error).
			try (InputStream in = fs.open(path)) {
				byte[] buffer = new byte[128 * 1024];
				int len;
				while ((len = in.read(buffer)) != -1) {
					bys.write(buffer, 0, len);
				}
			}
			value = new BytesWritable(bys.toByteArray());
		}

		@Override
		public void close() throws IOException {
			// Nothing to release: the input stream is closed in initialize().
		}

		@Override
		public boolean nextKeyValue() throws IOException, InterruptedException {
			// Consume the one-record flag here (not in getCurrentKey) so the
			// reader behaves correctly no matter how often, or in what order,
			// the framework calls the accessors.
			if (next) {
				next = false;
				return true;
			}
			return false;
		}

		@Override
		public Text getCurrentKey() throws IOException, InterruptedException {
			return fileName;
		}

		@Override
		public BytesWritable getCurrentValue() throws IOException, InterruptedException {
			return value;
		}

		@Override
		public float getProgress() throws IOException, InterruptedException {
			// Either 0% (record not yet consumed) or 100% (consumed).
			return next ? 0.0f : 1.0f;
		}
	}

	@Override
	public Configuration getConf() {
		// Delegate to Configured. The previous override returned null, which
		// made run() build the Job against a null Configuration.
		return super.getConf();
	}

	@Override
	public void setConf(Configuration conf) {
		// Must store the Configuration injected by ToolRunner; the previous
		// empty override silently discarded it, breaking getConf().
		// (These overrides are now redundant with Configured and could be
		// deleted entirely.)
		super.setConf(conf);
	}
}
