package com.hadoop.mr2.output;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * A {@link RecordWriter} that serializes each (key, value) pair as
 * {@code key$value*****} — "$" separates key from value and "*****"
 * separates consecutive records — written as UTF-8 bytes to the
 * supplied {@link FSDataOutputStream}.
 */
public class AuthRecordWriter<K, V> extends RecordWriter<K, V> {

	// Separators are encoded once instead of on every write() call.
	private static final byte[] KEY_VALUE_SEPARATOR = "$".getBytes(StandardCharsets.UTF_8);
	private static final byte[] RECORD_SEPARATOR = "*****".getBytes(StandardCharsets.UTF_8);

	private final FSDataOutputStream out;

	/**
	 * @param out the destination stream; this writer takes ownership and
	 *            closes it in {@link #close(TaskAttemptContext)}
	 */
	public AuthRecordWriter(FSDataOutputStream out) {
		this.out = out;
	}

	/**
	 * Writes one record in the form {@code key$value*****}.
	 * Key and value are rendered via {@code toString()} and encoded as
	 * UTF-8 explicitly — the no-arg getBytes() would use the platform
	 * default charset, making output vary between cluster nodes.
	 *
	 * @param key   the output key
	 * @param value the output value
	 * @throws IOException if the underlying stream fails
	 */
	@Override
	public void write(K key, V value) throws IOException, InterruptedException {
		out.write(key.toString().getBytes(StandardCharsets.UTF_8));
		out.write(KEY_VALUE_SEPARATOR);
		out.write(value.toString().getBytes(StandardCharsets.UTF_8));
		out.write(RECORD_SEPARATOR);
	}

	/**
	 * Closes the underlying stream. Without this, buffered bytes may
	 * never reach the filesystem and the stream handle leaks (the
	 * original implementation left this as an empty stub).
	 */
	@Override
	public void close(TaskAttemptContext context) throws IOException, InterruptedException {
		out.close();
	}

}
