package com.chenjj.bigdata.mapreduce;

import com.chenjj.bigdata.hbase.client.HdfsClientUtil;
import com.chenjj.bigdata.mapreduce.mapper.Dingchang2DingjieMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import java.io.IOException;


/**
 * Driver for the "Dingchang2Dingjie" map-only MapReduce job.
 *
 * <p>Flow: uploads two local sample files to HDFS, deletes any previous output
 * directory, then configures and runs a job with {@link Dingchang2DingjieMapper}
 * as the mapper (no reducer class is set). Designed to run locally from a
 * Windows development machine against a remote cluster's HDFS.
 */
public class Dingchang2DingjieRunner {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Point the Hadoop client at a local winutils installation (Windows dev box).
        System.setProperty("hadoop.home.dir","D:\\tools\\hadoop-2.7.3");
        System.setProperty("HADOOP_USER_NAME","root");// operate on HDFS as the root user
        // Native library required for local filesystem operations on Windows.
        System.load("D:\\Code\\HdfsClient\\conf\\bin\\hadoop.dll");

        // Hadoop configuration: cluster addresses come from the bundled site files.
        Configuration conf = new Configuration();
        conf.addResource("core-site.xml");
        conf.addResource("hdfs-site.xml");
        conf.addResource("yarn-site.xml");
        // Run the job in-process for debugging.
        // NOTE(review): "mapred.job.tracker" is the deprecated pre-YARN key; the
        // modern equivalent is "mapreduce.framework.name" = "local" — confirm
        // which one this Hadoop version honors before changing it.
        conf.set("mapred.job.tracker","local");

        // Input fixtures live on the classpath; resolve their absolute paths.
        String classPath = Dingchang2DingjieRunner.class.getClassLoader().getResource("").getPath();
        String hdfsInputPath = "/test/mapreduce/dingchang2dingjie/input/";
        String hdfsOutPath = "/test/mapreduce/dingchang2dingjie/output/";
        String [] localDataPath = {classPath+"dingchang2dingjie/dream.txt",
                                   classPath+"dingchang2dingjie/dream1.txt"};

        // Upload the local fixture files to the HDFS input directory.
        for (int i = 0; i < localDataPath.length ; i++) {
            HdfsClientUtil.getInstance().put(localDataPath[i], hdfsInputPath+"file"+i);
        }

        // Remove the output directory — the job fails if it already exists.
        HdfsClientUtil.getInstance().rm(hdfsOutPath);

        // Define the job: map-only (no reducer class is set).
        Job job = Job.getInstance(conf, "Dingchang2Dingjie");
        job.setJarByClass(Dingchang2DingjieRunner.class);
        job.setMapperClass(Dingchang2DingjieMapper.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        // Mapper input; if the path is a directory, every file under it is read.
        FileInputFormat.addInputPath(job, new Path(hdfsInputPath));
        // Output directory (mapper output here, since there is no reducer).
        FileOutputFormat.setOutputPath(job, new Path(hdfsOutPath));

        // Register a named output for MultipleOutputs-based writes in the mapper.
        // NOTE(review): key/value classes here are (NullWritable, Text), the
        // reverse of the job's (Text, NullWritable) output types declared above —
        // verify against Dingchang2DingjieMapper's write calls.
        MultipleOutputs.addNamedOutput(job, "namedOutput", TextOutputFormat.class,NullWritable.class, Text.class);

        // Lazy output suppresses empty part-r-00000 files.
        LazyOutputFormat.setOutputFormatClass(job,TextOutputFormat.class);

        // Launch the job and block until it finishes; exit code reflects success.
        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }

    /**
     * Helper to override the base output file name ("part" by default).
     * Parameterized to the job's declared output types to avoid a raw type.
     * NOTE(review): currently unreferenced in this file — confirm it is used
     * elsewhere or remove it.
     */
    private static class OutFormatter extends TextOutputFormat<Text, NullWritable> {
        protected static void setOutputName(JobContext job, String name) {
            job.getConfiguration().set(BASE_OUTPUT_NAME, name);
        }
    }
}
