package com.mall.hadoop.job.comjob;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.Logger;

@SuppressWarnings({"rawtypes","unchecked"})
public class CommonJob {

	public static Logger logger = Logger.getLogger(CommonJob.class);
	
	/**
	 * Configures and runs a common MapReduce job: verifies the input path exists,
	 * wires up the mapper/reducer/output format, clears any stale output directory,
	 * and blocks until the job finishes.
	 *
	 * @param conf Hadoop configuration
	 * @param jobName name assigned to the submitted job
	 * @param input HDFS input file path
	 * @param output HDFS output directory path (deleted first if it already exists)
	 * @param mapClazz mapper implementation class
	 * @param reduceClass reducer implementation class
	 * @param formatClass output format class (used to rename the output files)
	 * @return "success" when the job completes successfully, "文件不存在" when the
	 *         input path does not exist, "exception" when the job fails or throws
	 */
	public static String commonJobStart(Configuration conf,String jobName,String input,String output,Class mapClazz,Class reduceClass, Class formatClass){
		String res = null;
		FileSystem fs = null;
		try{
			// Disable FileSystem caching so the close() in finally does not
			// invalidate a cached instance shared with other callers.
			conf.set("fs.hdfs.impl.disable.cache", "true"); 
			fs = FileSystem.get(conf);
			if(fs.exists(new Path(input))){
				Job job = Job.getInstance(conf);
				job.setJarByClass(CommonJob.class);
				job.setJobName(jobName);
				job.setMapperClass(mapClazz);
				job.setReducerClass(reduceClass);
				job.setOutputKeyClass(Text.class);
				job.setOutputValueClass(IntWritable.class);
				// Single reducer so all results land in one output file.
				job.setNumReduceTasks(1);
				// Custom output format that renames the output files.
				job.setOutputFormatClass(formatClass);
					
				FileInputFormat.addInputPath(job,new Path(input));
				// Delete a pre-existing output directory, otherwise the job
				// submission fails with FileAlreadyExistsException.
				Path out = new Path(output);
				if(fs.exists(out)){
					fs.delete(out, true);
				}
				FileOutputFormat.setOutputPath(job, out);
				// BUG FIX: the result of waitForCompletion was previously ignored
				// and "success" was returned even when the job failed. Use the
				// boolean outcome to decide the result string.
				res = job.waitForCompletion(true) ? "success" : "exception";
			}else{
				res = "文件不存在";
			}
		}catch(Exception e){
			res = "exception";
			// BUG FIX: log at error level WITH the exception so the stack trace
			// is preserved instead of silently discarding the cause.
			logger.error("hadoop开启"+jobName+"失败", e);
		}finally{
			// BUG FIX: fs is null if FileSystem.get(conf) itself threw; guard
			// against a secondary NullPointerException in the finally block.
			if(fs != null){
				try {
					fs.close();
				} catch (IOException e) {
					logger.info("关闭Filesystem出现异常："+e.getMessage());
				}
			}
		}
		return res;
	}
}
