package com.atguigu.mapreduce01;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;

/**
 * Driver for an HBase table-to-table MapReduce job: scans a row-key range of an
 * input table and writes the resulting {@code Put}s into an output table.
 *
 * <p>Intended to be launched via {@code ToolRunner}, which injects the Hadoop
 * {@link Configuration} through {@link #setConf(Configuration)}.
 *
 * @author lvbingbing
 * @date 2020-07-26 15:14
 */
public class ReadFruitDriver implements Tool {

    /** Hadoop configuration injected by ToolRunner before {@link #run(String[])} is called. */
    private Configuration configuration;

    /**
     * Configures and runs the table-to-table MapReduce job.
     *
     * @param args 0: job name, 1: input table name, 2: output table name,
     *             3: scan start row (inclusive), 4: scan stop row (exclusive)
     * @return 0 if the job completed successfully, 1 otherwise
     * @throws IllegalArgumentException if fewer than 5 arguments are supplied
     * @throws Exception if job setup or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {

        // Fail fast with a clear message instead of an opaque ArrayIndexOutOfBoundsException.
        if (args == null || args.length < 5) {
            throw new IllegalArgumentException(
                    "Expected 5 arguments: jobName inputTable outputTable startRow stopRow");
        }

        // The first argument is the job name, per the documented contract
        // (previously hard-coded to "FruitMapReduce", contradicting the javadoc).
        Job job = Job.getInstance(configuration, args[0]);

        job.setJarByClass(ReadFruitDriver.class);

        // Restrict the scan to [startRow, stopRow). Encode row keys with an explicit
        // charset so results do not depend on the platform default encoding.
        Scan scan = new Scan(
                args[3].getBytes(StandardCharsets.UTF_8),
                args[4].getBytes(StandardCharsets.UTF_8));

        // Mapper reads rows from the input table and emits (NullWritable, Put) pairs.
        TableMapReduceUtil.initTableMapperJob(
                args[1], scan, ReadFruitMapper.class, NullWritable.class, Put.class, job);

        // Reducer writes the Puts into the output table.
        TableMapReduceUtil.initTableReducerJob(args[2], ReadFruitReducer.class, job);

        boolean succeeded = job.waitForCompletion(true);
        return succeeded ? 0 : 1;
    }

    /** Stores the Hadoop configuration (called by ToolRunner before {@link #run(String[])}). */
    @Override
    public void setConf(Configuration conf) {
        this.configuration = conf;
    }

    /** @return the configuration previously supplied via {@link #setConf(Configuration)} */
    @Override
    public Configuration getConf() {
        return configuration;
    }
}
