package com.sarnath.sardoop.modules.hadoop.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import com.sarnath.sardoop.common.spring.SpringUtil;
import com.sarnath.sardoop.common.utils.StringUtils;
import com.sarnath.sardoop.modules.hadoop.entity.MrJobCreateEntity;
import com.sarnath.sardoop.modules.sys.util.UserUtils;


/**
 * Assembles Hadoop MapReduce {@link Job} instances from a user-supplied
 * {@link MrJobCreateEntity}, wiring mapper/reducer classes loaded from the
 * job's uploaded jar and configuring text-file or HBase-table input/output
 * depending on the declared formats.
 */
public class JobUtils {
	/** Plain text (HDFS file) input/output format. */
	public static final int TEXT_FORMAT = 1;
	/** HBase table input/output format. */
	public static final int HBASE_FORMAT = 2;
	/** Type-name token mapping to {@link Text}. */
	public static final String STRING = "string";
	/** Type-name token mapping to {@link IntWritable}. */
	public static final String INT = "int";
	// Loads user mapper/reducer/combiner classes from the uploaded jar.
	// NOTE(review): public and mutable — kept as-is for backward compatibility,
	// but consider private static final if no external code reassigns it.
	public static ByteClassLoader loader = SpringUtil.getBean("byteClassLoader");

	/**
	 * Builds a fully-configured Job for the given entity.
	 *
	 * @param conf   Hadoop configuration to base the job on
	 * @param entity job description (name, jar path, mapper/reducer specs)
	 * @return a Job ready for submission (single reducer, no retries,
	 *         speculative map execution disabled)
	 * @throws IllegalArgumentException if the mapper/reducer format
	 *         combination is not one of the four supported pairings
	 * @throws Exception if the user classes cannot be loaded or paths are invalid
	 */
	public static Job getJob(Configuration conf, MrJobCreateEntity entity) throws Exception {
		Job job = Job.getInstance(conf, entity.getName());
		job.addArchiveToClassPath(new Path(entity.getJarPath()));
		job.setJarByClass(getMapperClass(entity));
		int mapperType = entity.getMapper().getType();
		int reducerType = entity.getReducer().getType();
		// Exactly one of the four pairings must match; fail fast on anything
		// else rather than submitting a half-configured job.
		if (mapperType == TEXT_FORMAT && reducerType == TEXT_FORMAT) {
			textToTextJob(job, entity);
		} else if (mapperType == HBASE_FORMAT && reducerType == HBASE_FORMAT) {
			baseTobase(job, entity);
		} else if (mapperType == TEXT_FORMAT && reducerType == HBASE_FORMAT) {
			textTobase(job, entity);
		} else if (mapperType == HBASE_FORMAT && reducerType == TEXT_FORMAT) {
			baseToText(job, entity);
		} else {
			throw new IllegalArgumentException(
					"Unsupported mapper/reducer format combination: " + mapperType + "/" + reducerType);
		}
		if (StringUtils.isNotBlank(entity.getCombinerClass())) {
			job.setCombinerClass(getCombineClass(entity)); // optional combiner
		}
		// Fail fast (no task retries) and keep results deterministic:
		// single reducer, no speculative map execution.
		job.setMaxMapAttempts(1);
		job.setMaxReduceAttempts(1);
		job.setMapSpeculativeExecution(false);
		job.setNumReduceTasks(1);
		return job;
	}

	/** Configures a text-file-in, text-file-out job. */
	private static void textToTextJob(Job job, MrJobCreateEntity entity) throws Exception {
		job.setMapperClass(getMapperClass(entity));
		job.setReducerClass(getReducerClass(entity));
		job.setOutputKeyClass(getDataType(entity.getMapper().getOutKeyType()));
		job.setOutputValueClass(getDataType(entity.getMapper().getOutValueType()));
		FileInputFormat.addInputPath(job, new Path(entity.getMapper().getPath()));
		FileOutputFormat.setOutputPath(job, new Path(entity.getReducer().getPath()));
	}

	/** Configures an HBase-table-in, HBase-table-out job. */
	private static void baseTobase(Job job, MrJobCreateEntity entity) throws Exception {
		TableMapReduceUtil.initTableMapperJob(
				qualifiedTableName(entity.getMapper().getTableName()),  // input table
				newMapReduceScan(),                                     // scan controlling CF/attribute selection
				getMapperClass(entity),                                 // mapper class
				getDataType(entity.getMapper().getOutKeyType()),        // mapper output key
				getDataType(entity.getMapper().getOutValueType()),      // mapper output value
				job);
		TableMapReduceUtil.initTableReducerJob(
				qualifiedTableName(entity.getReducer().getTableName()), // output table
				getReducerClass(entity),                                // reducer class
				job);
	}

	/** Configures a text-file-in, HBase-table-out job. */
	private static void textTobase(Job job, MrJobCreateEntity entity) throws Exception {
		job.setMapperClass(getMapperClass(entity));
		FileInputFormat.addInputPath(job, new Path(entity.getMapper().getPath()));
		TableMapReduceUtil.initTableReducerJob(
				qualifiedTableName(entity.getReducer().getTableName()), // output table
				getReducerClass(entity),                                // reducer class
				job);
		job.setOutputKeyClass(getDataType(entity.getMapper().getOutKeyType()));
		job.setOutputValueClass(getDataType(entity.getMapper().getOutValueType()));
	}

	/** Configures an HBase-table-in, text-file-out job. */
	private static void baseToText(Job job, MrJobCreateEntity entity) throws Exception {
		TableMapReduceUtil.initTableMapperJob(
				qualifiedTableName(entity.getMapper().getTableName()),  // input table
				newMapReduceScan(),                                     // scan controlling CF/attribute selection
				getMapperClass(entity),                                 // mapper class
				getDataType(entity.getMapper().getOutKeyType()),        // mapper output key
				getDataType(entity.getMapper().getOutValueType()),      // mapper output value
				job);
		job.setReducerClass(getReducerClass(entity));
		FileOutputFormat.setOutputPath(job, new Path(entity.getReducer().getPath()));
	}

	/**
	 * Creates a Scan tuned for MapReduce: large caching (Scan's default of 1
	 * is very slow for MR) and block caching disabled (MR scans are one-shot
	 * and would evict useful entries from the region server cache).
	 */
	private static Scan newMapReduceScan() {
		Scan scan = new Scan();
		scan.setCaching(1000);
		scan.setCacheBlocks(false);
		return scan;
	}

	/** Prefixes a table name with the current user's namespace ("ns:table"). */
	private static String qualifiedTableName(String tableName) {
		return StringUtils.join(new String[] { UserUtils.getNameSpace(), tableName }, ":");
	}

	// The three loaders below intentionally return the raw Class type:
	// Job.setMapperClass/setReducerClass/setCombinerClass require
	// Class<? extends Mapper/Reducer>, and a dynamically loaded class can
	// only be passed raw (unchecked) without an unverifiable cast.

	/** Loads the user's mapper class from the job jar. */
	private static Class getMapperClass(MrJobCreateEntity entity) throws Exception {
		return loader.loadClass(entity.getJarPath(), entity.getMapper().getClassName());
	}

	/** Loads the user's combiner class from the job jar. */
	private static Class getCombineClass(MrJobCreateEntity entity) throws Exception {
		return loader.loadClass(entity.getJarPath(), entity.getCombinerClass());
	}

	/** Loads the user's reducer class from the job jar. */
	private static Class getReducerClass(MrJobCreateEntity entity) throws Exception {
		return loader.loadClass(entity.getJarPath(), entity.getReducer().getClassName());
	}

	/**
	 * Maps a declared type name to its Writable class.
	 * Unknown or null names fall back to {@link NullWritable} (the original
	 * code threw NPE on null; unknown strings already defaulted this way).
	 */
	private static Class getDataType(String typeName) {
		if (typeName == null) {
			return NullWritable.class;
		}
		switch (typeName) {
		case STRING:
			return Text.class;
		case INT:
			return IntWritable.class;
		default:
			return NullWritable.class;
		}
	}
}
