package com.pxene.dmp.task;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.springframework.batch.core.JobExecutionException;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.repeat.RepeatStatus;

import com.pxene.dmp.constant.BaseConstant;
import com.pxene.dmp.constant.FileSuffixTime;
import com.pxene.dmp.main.JobManager;
import com.pxene.dmp.mapper.SendDomainUserMapper;
import com.pxene.dmp.reducer.CountDataSizeReducer;

public class SendDomainUserTask extends MapReduceTask {

	/**
	 * Configures and runs the "sendDomainUser" MapReduce job.
	 *
	 * <p>Reads the domain-user sequence files produced by the get-data step
	 * (under {@code HDFS_TEMP_GETDATA_DIR}/{@code HDFS_TEMP_DOMAINUSER_DIR}
	 * for the current date-hour), sends each record via
	 * {@link SendDomainUserMapper}, and aggregates sizes with a single
	 * {@link CountDataSizeReducer}, writing text output under the
	 * corresponding send-data directory.
	 *
	 * @param contribution the step contribution (unused by this task)
	 * @param context      the chunk context (unused by this task)
	 * @return {@link RepeatStatus#FINISHED} when the job completes successfully
	 * @throws JobExecutionException if the input path does not exist or the
	 *                               MapReduce job does not complete successfully
	 * @throws Exception             for any other Hadoop/HDFS failure
	 */
	@Override
	public RepeatStatus execute(StepContribution contribution, ChunkContext context) 
			throws Exception {
		String datehour = FileSuffixTime.getDatehour();
		
		String inputPath = BaseConstant.HDFS_TEMP_ROOT_DIR
				         + datehour + "/"
				         + BaseConstant.HDFS_TEMP_GETDATA_DIR
				         + BaseConstant.HDFS_TEMP_DOMAINUSER_DIR;
		String outputPath = BaseConstant.HDFS_TEMP_ROOT_DIR 
				          + datehour + "/"
						  + BaseConstant.HDFS_TEMP_SENDDATA_DIR
						  + BaseConstant.HDFS_TEMP_DOMAINUSER_DIR;
		
		// Fail the step early if the upstream get-data step produced no input.
		if (!hadoopFs.exists(new Path(inputPath))) {
			throw new JobExecutionException(BaseConstant.LOG_PREFIX + inputPath + " not exists");
		}
		
		configuration.set("date", datehour);
		configuration.set("api-name", BaseConstant.API_SETDOMAINUSER);
		// Ensure the job's bundled classes win over the cluster's classpath.
		configuration.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
		
		Job job = Job.getInstance(configuration, "sendDomainUser");
		job.setJarByClass(JobManager.class);
		// map
		job.setMapperClass(SendDomainUserMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(LongWritable.class);
		// reduce: single reducer so the size count is aggregated in one output
		job.setReducerClass(CountDataSizeReducer.class);
		job.setOutputKeyClass(NullWritable.class);
		job.setOutputValueClass(NullWritable.class);
		job.setNumReduceTasks(1);
		
		job.setInputFormatClass(SequenceFileInputFormat.class);
		FileInputFormat.addInputPath(job, new Path(inputPath));
		// Cap the split size at 4 MB so more map tasks are launched and the
		// send runs faster. (The original comment claimed 8 MB, but the code
		// has always set 4 MB — the comment is corrected here, not the value.)
		FileInputFormat.setMaxInputSplitSize(job, 1024 * 1024 * 4L);
		FileInputFormat.setMinInputSplitSize(job, 1024L);
		job.setOutputFormatClass(TextOutputFormat.class);
		FileOutputFormat.setOutputPath(job, new Path(outputPath)); 
		
		// FIX: the original ignored waitForCompletion's result and always
		// returned FINISHED, silently hiding MapReduce failures from the
		// batch framework. Propagate failure explicitly instead.
		if (!job.waitForCompletion(true)) {
			throw new JobExecutionException(BaseConstant.LOG_PREFIX + "sendDomainUser job failed");
		}
		
		return RepeatStatus.FINISHED;
	}

}
