package com.maxbill.hadoop.reduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

/**
 * Utility class that builds Hadoop Map/Reduce configuration objects
 * (legacy {@code org.apache.hadoop.mapred} API).
 *
 * <p>Author: zuoshuai (MaxBill), 2017/11/16.
 */
public final class JobsUtils {

    /** HDFS NameNode URI. NOTE(review): non-standard port 10000 — confirm against cluster config. */
    private static final String HDFS_PATH = "hdfs://127.0.0.1:10000";
    /** JobTracker address. NOTE(review): non-standard port 20000 — confirm against cluster config. */
    private static final String JOBS_PATH = "hdfs://127.0.0.1:20000";

    /** Utility class — not instantiable. */
    private JobsUtils() {
        throw new AssertionError("No instances of JobsUtils");
    }

    /**
     * Builds a Hadoop {@link Configuration} pointing at the local pseudo-cluster.
     *
     * <p>NOTE(review): {@code fs.default.name} and {@code mapred.job.tracker} are
     * deprecated key names (modern equivalents: {@code fs.defaultFS},
     * {@code mapreduce.jobtracker.address}). Hadoop still honors the old keys,
     * so they are kept for compatibility — confirm the target Hadoop version
     * before migrating.
     *
     * @return a new {@code Configuration} with filesystem and job-tracker set
     */
    public static Configuration getConfig() {
        Configuration config = new Configuration();
        config.set("fs.default.name", HDFS_PATH);
        config.set("mapred.job.tracker", JOBS_PATH);
        return config;
    }

    /**
     * Builds a {@link JobConf} for a word-count-style job wired to
     * {@code MyMap} / {@code MyReduce}, with {@code Text} keys and
     * {@code IntWritable} values, plain-text input and output.
     *
     * <p>The reducer is also registered as the combiner — valid only because
     * {@code MyReduce}'s operation is associative/commutative (e.g. summing).
     *
     * @param jobName display name assigned to the job
     * @return a fully configured {@code JobConf} ready for submission
     */
    public static JobConf getJobsConf(String jobName) {
        JobConf jobConf = new JobConf(getConfig());
        jobConf.setJobName(jobName);
        jobConf.setOutputKeyClass(Text.class);
        jobConf.setOutputValueClass(IntWritable.class);
        jobConf.setMapperClass(MyMap.class);
        // Local aggregation before the shuffle reduces network traffic.
        jobConf.setCombinerClass(MyReduce.class);
        jobConf.setReducerClass(MyReduce.class);
        jobConf.setInputFormat(TextInputFormat.class);
        jobConf.setOutputFormat(TextOutputFormat.class);
        return jobConf;
    }

}
