package com.hw.mapreduce.service;

import java.io.IOException;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class WordCountMRService {

	/** Hadoop cluster configuration, injected by Spring. */
	@Autowired
	private Configuration conf;

	/** Upper and lower bound (bytes) for combined input splits: 4 MiB. */
	private static final long SPLIT_SIZE = 4L * 1024 * 1024;

	/**
	 * Job of the most recent {@link #doWordCount} invocation, retained only so
	 * {@link #cleaner()} can kill an in-flight run on application shutdown.
	 */
	private volatile Job job = null;

	/** True once {@link #job} has been handed to the cluster for execution. */
	private volatile boolean submitted = false;

	@PostConstruct
	private void init() throws IOException {
		// Fail fast at startup if the injected configuration cannot produce a
		// job, rather than on the first doWordCount() call. The instance built
		// here is deliberately discarded: a Hadoop Job is single-use (it can be
		// submitted only once), so each doWordCount() builds its own via newJob().
		newJob();
	}

	/**
	 * Builds a fully configured, not-yet-submitted word-count job.
	 *
	 * <p>A fresh {@link Job} is created per invocation instead of reusing one
	 * built in {@code @PostConstruct}: reusing a job fails on the second
	 * submission and accumulates input paths across calls.
	 *
	 * @return a new job ready to receive input/output paths
	 * @throws IOException if the job cannot be created from {@link #conf}
	 */
	private Job newJob() throws IOException {
		Job j = Job.getInstance(conf, "WordCountMR");

		j.setMapperClass(WordMapper.class);
		// The reducer doubles as a combiner (presumably the word-count sum is
		// associative/commutative — confirm against WordReduce).
		j.setCombinerClass(WordReduce.class);
		j.setReducerClass(WordReduce.class);

		// (key, value) types emitted by the Mapper.
		j.setMapOutputKeyClass(Text.class);
		j.setMapOutputValueClass(LongWritable.class);
		// (key, value) types of the final (Reducer) output.
		j.setOutputKeyClass(Text.class);
		j.setOutputValueClass(LongWritable.class);

		// Merge many small input files into combined splits; pinning min and
		// max to the same value yields splits of (approximately) SPLIT_SIZE.
		j.setInputFormatClass(CombineTextInputFormat.class);
		CombineTextInputFormat.setMaxInputSplitSize(j, SPLIT_SIZE);
		CombineTextInputFormat.setMinInputSplitSize(j, SPLIT_SIZE);
		return j;
	}

	/**
	 * Kills an in-flight job (if any) when the Spring context shuts down.
	 *
	 * <p>Guarded by {@code submitted}: calling {@code killJob()} (or
	 * {@code isComplete()}) on a job that was never submitted throws
	 * {@code IllegalStateException} during shutdown.
	 *
	 * @throws IOException if the job status cannot be queried or the kill fails
	 */
	@PreDestroy
	public void cleaner() throws IOException {
		Job current = job;
		if (current != null && submitted && !current.isComplete()) {
			current.killJob();
		}
	}

	/**
	 * Runs the word-count job over {@code inputPath}, writing results to
	 * {@code outputPath}, and blocks until the job finishes.
	 *
	 * @param inputPath  input path; blank/null values make this a no-op
	 * @param outputPath output path (must not already exist in HDFS);
	 *                   blank/null values make this a no-op
	 * @throws IOException            if the job cannot be created or it fails
	 * @throws ClassNotFoundException if a job class cannot be resolved
	 * @throws InterruptedException   if the wait for completion is interrupted
	 */
	public void doWordCount(String inputPath, String outputPath)
			throws IllegalArgumentException, IOException, ClassNotFoundException, InterruptedException {

		// Blank arguments are silently ignored, matching the original contract.
		if (StringUtils.isEmpty(inputPath) || StringUtils.isEmpty(outputPath)) {
			return;
		}

		Job current = newJob();
		FileInputFormat.addInputPath(current, new Path(inputPath));
		FileOutputFormat.setOutputPath(current, new Path(outputPath));

		// Publish before submitting so cleaner() can see an in-flight run.
		// (submitted flips slightly early; killJob on a DEFINE-state job is
		// avoided by cleaner()'s isComplete() running after submission in
		// practice — shutdown and submission racing here is benign.)
		job = current;
		submitted = true;

		// Hand the job to YARN and wait. Previously the boolean result was
		// ignored (stored in an unused local), silently swallowing failures.
		if (!current.waitForCompletion(true)) {
			throw new IOException(
					"WordCountMR job failed: input=" + inputPath + ", output=" + outputPath);
		}
	}

}
