package com.atguigu.mapreduce02;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;

/**
 * @author lvbingbing
 * @date 2020-07-26 21:34
 */
public class ReadFruitHDFSDriver implements Tool {

    /** Default HDFS output directory, used when no CLI argument is supplied. */
    private static final String DEFAULT_OUTPUT_PATH = "hdfs://hadoop102:9000/out";

    private Configuration configuration;

    /**
     * Builds and runs a map-only MapReduce job that scans the HBase table
     * {@code "fruit"} through {@code ReadFruitHDFSMapper} and writes the
     * mapper's {@code Text}/{@code Text} output to HDFS.
     *
     * @param args optional command-line arguments; {@code args[0]}, when
     *             present, overrides the default output directory
     * @return 0 if the job completed successfully, 1 otherwise
     * @throws Exception if job configuration or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {

        // Use getConf() (the Tool accessor) rather than the field directly.
        Job job = Job.getInstance(getConf(), "mapReduce2");
        job.setJarByClass(ReadFruitHDFSDriver.class);

        // Recommended scan settings for MapReduce over HBase: fetch rows in
        // batches instead of one RPC per row, and keep a full-table scan from
        // churning the region server's block cache.
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        TableMapReduceUtil.initTableMapperJob(
                "fruit", scan, ReadFruitHDFSMapper.class, Text.class, Text.class, job);

        // Map-only job: mapper output is written straight to HDFS.
        job.setNumReduceTasks(0);

        // Allow the output path to be overridden on the command line; fall
        // back to the original hard-coded location for backward compatibility.
        String outputPath = (args != null && args.length > 0) ? args[0] : DEFAULT_OUTPUT_PATH;
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /** Stores the Hadoop configuration injected by the Tool runner. */
    @Override
    public void setConf(Configuration conf) {
        this.configuration = conf;
    }

    /** Returns the Hadoop configuration previously set via {@link #setConf}. */
    @Override
    public Configuration getConf() {
        return configuration;
    }
}
