package com.pxene.dmp.task;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.springframework.batch.core.JobExecutionException;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.repeat.RepeatStatus;

import com.pxene.dmp.constant.BaseConstant;
import com.pxene.dmp.constant.FileSuffixTime;
import com.pxene.dmp.main.JobManager;
import com.pxene.dmp.mapper.GetSearchWordMapper;
import com.pxene.dmp.reducer.GetSearchWordReducer;

/**
 * Spring Batch tasklet that runs the "getSearchWord" MapReduce job.
 *
 * <p>Reads cleaned search-word records (SequenceFile) from the temp "clear data"
 * directory for the current date-hour and writes extracted results (SequenceFile)
 * to the temp "get data" directory for the same date-hour.
 */
public class GetSearchWordTask extends MapReduceTask {
	
	/**
	 * Configures and synchronously runs the MapReduce job.
	 *
	 * @param contribution step contribution (unused)
	 * @param context      chunk context (unused)
	 * @return {@link RepeatStatus#FINISHED} when the job completes successfully
	 * @throws JobExecutionException if the input path is missing or the MapReduce job fails
	 * @throws Exception             on job submission / HDFS access errors
	 */
	@Override
	public RepeatStatus execute(StepContribution contribution, ChunkContext context) 
			throws Exception {
		// Date-hour suffix selects the working directory for this batch run.
		String datehour = FileSuffixTime.getDatehour();
		
		String inputPath = BaseConstant.HDFS_TEMP_ROOT_DIR
 						 + datehour + "/"
				   		 + BaseConstant.HDFS_TEMP_CLEARDATA_DIR
				   		 + BaseConstant.HDFS_TEMP_SEARCHWORD_DIR;
		String outputPath = BaseConstant.HDFS_TEMP_ROOT_DIR
						  + datehour + "/"
		  		 		  + BaseConstant.HDFS_TEMP_GETDATA_DIR
		  		 		  + BaseConstant.HDFS_TEMP_SEARCHWORD_DIR;
		
		// Fail fast when the upstream step has not produced the expected input.
		if (!hadoopFs.exists(new Path(inputPath))) {
			throw new JobExecutionException(BaseConstant.LOG_PREFIX + inputPath + " not exists");
		}
		
		Job job = Job.getInstance(configuration, "getSearchWord");
		job.setJarByClass(JobManager.class);
		
		// map
		job.setMapperClass(GetSearchWordMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		// reduce
		job.setReducerClass(GetSearchWordReducer.class);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(Text.class);
		job.setNumReduceTasks(BaseConstant.REDUCE_NUMBER);
		
		job.setInputFormatClass(SequenceFileInputFormat.class);
		FileInputFormat.addInputPath(job, new Path(inputPath));
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		FileOutputFormat.setOutputPath(job, new Path(outputPath));
		
		// waitForCompletion returns false when the job fails; previously this
		// result was ignored and the step reported FINISHED even on failure,
		// letting downstream steps run against missing/partial output.
		if (!job.waitForCompletion(true)) {
			throw new JobExecutionException(BaseConstant.LOG_PREFIX
					+ "getSearchWord job failed, input=" + inputPath + ", output=" + outputPath);
		}
		
		return RepeatStatus.FINISHED;
	}

}
