package mr.fileToFile;

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Output format that renames the generated output files, writing each task's
 * records to a CSV file named {@code S1MME_<nowtime>_<taskNumber>.csv}.
 *
 * @author 李岩飞 (Li Yanfei)
 * @email eliyanfei@126.com
 * 2017-12-18 10:17:05 AM
 */
public class S1MMEOutputFormat extends FileOutputFormat<Text, Text> {
	/** Fixed prefix for every generated CSV file name. */
	private static final String PREFIX = "S1MME_";

	/**
	 * Creates a record writer whose destination is a CSV file named
	 * {@code S1MME_<nowtime>_<taskNumber>.csv} inside the job's output directory.
	 *
	 * @param job task attempt context; must carry the {@code nowtime} property
	 *            in its configuration (set by the driver)
	 * @return a writer backed by the freshly created output file
	 * @throws IOException if {@code nowtime} is missing or the file cannot be created
	 * @throws InterruptedException declared by the overridden method contract
	 */
	@Override
	public RecordWriter<Text, Text> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException {
		Path outputDir = getOutputPath(job);
		String nowtime = job.getConfiguration().get("nowtime");
		// Fail fast with a clear message instead of silently producing "S1MME_null_*.csv".
		if (nowtime == null) {
			throw new IOException("Required configuration property 'nowtime' is not set");
		}
		// The task id renders as "task_<jobId>_<type>_<NNNNNN>"; the last segment
		// (the task number) keeps file names unique across tasks of this job.
		String[] idParts = job.getTaskAttemptID().getTaskID().toString().split("_");
		String taskNumber = idParts[idParts.length - 1];
		// NOTE(review): this writes directly into the final output directory,
		// bypassing the OutputCommitter. Two speculative attempts of the same task
		// share a task id and would collide on the same file — confirm that
		// speculative execution is disabled for jobs using this format.
		Path path = new Path(outputDir, PREFIX + nowtime + "_" + taskNumber + ".csv");
		FSDataOutputStream fileOut = path.getFileSystem(job.getConfiguration()).create(path);
		return new S1MMERecordWriter(fileOut);
	}

}