package com.mr.outputformat;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * Custom {@link RecordWriter} that routes each record to one of two output
 * files: keys containing the substring {@code "atguigu"} go to
 * {@code atguigu.log}, everything else goes to {@code other.log}.
 *
 * <p>Also maintains two counters in the group {@code "MyCounter"}
 * ({@code atguiguNumber} / {@code otherNumber}) so the split can be verified
 * from the job's counter report.</p>
 */
public class MyRecordWriter extends RecordWriter<String, NullWritable> {

	// NOTE(review): output locations are hard-coded to a local Windows path;
	// consider deriving them from the job's configured output directory so the
	// writer is portable. Left unchanged here to preserve behavior.
	private Path atguigu = new Path("E:/尚硅谷大数据/05-Hadoop，HadoopHA，Zookeeper/05-Mapreduce/mroutput/outputformat/atguigu.log");
	private Path other = new Path("E:/尚硅谷大数据/05-Hadoop，HadoopHA，Zookeeper/05-Mapreduce/mroutput/outputformat/other.log");

	// Filesystem handle obtained from the job configuration. This is the
	// JVM-wide cached instance returned by FileSystem.get(conf); it is
	// deliberately NOT closed in close() (see note there).
	private FileSystem fs;

	// One output stream per target file; opened in the constructor, closed in close().
	private FSDataOutputStream atguiguOS;
	private FSDataOutputStream otherOS;

	// Kept so write() can increment job counters.
	private TaskAttemptContext context;

	/**
	 * Opens both output streams using the filesystem resolved from the task's
	 * configuration.
	 *
	 * @param job the current task attempt context, used for configuration and counters
	 * @throws IOException if the filesystem or either output stream cannot be opened
	 */
	public MyRecordWriter(TaskAttemptContext job) throws IOException {

		context = job;

		// Resolve the filesystem from the job configuration.
		Configuration conf = job.getConfiguration();
		fs = FileSystem.get(conf);

		// Open the two destination streams (create() truncates existing files).
		atguiguOS = fs.create(atguigu);
		otherOS = fs.create(other);
	}

	/**
	 * Writes one key to the appropriate output file and bumps the matching counter.
	 *
	 * @param key   the record text; routed by whether it contains "atguigu"
	 * @param value unused ({@link NullWritable})
	 * @throws IOException if the underlying stream write fails
	 */
	@Override
	public void write(String key, NullWritable value) throws IOException, InterruptedException {

		if(key.contains("atguigu")) {
			// Fix: encode explicitly as UTF-8 instead of the platform default
			// charset, so non-ASCII record text is written consistently.
			atguiguOS.write(key.getBytes(StandardCharsets.UTF_8));

			// Count records that contain "atguigu".
			context.getCounter("MyCounter", "atguiguNumber").increment(1);

		}else {
			otherOS.write(key.getBytes(StandardCharsets.UTF_8));

			// Count records that do not contain "atguigu".
			context.getCounter("MyCounter", "otherNumber").increment(1);

		}

	}

	/**
	 * Closes both output streams.
	 *
	 * <p>Fix: the previous version also called {@code fs.close()}. Since
	 * {@link FileSystem#get(Configuration)} returns a JVM-wide cached instance,
	 * closing it here could break any other task or component sharing the same
	 * filesystem object. Only the streams owned by this writer are closed.</p>
	 *
	 * @param context the task attempt context (unused)
	 */
	@Override
	public void close(TaskAttemptContext context) throws IOException, InterruptedException {

		if(atguiguOS != null) {
			IOUtils.closeStream(atguiguOS);
		}

		if(otherOS != null) {
			IOUtils.closeStream(otherOS);
		}

	}

}
