package HBaseDemo.HBase2HBase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * MapReduce driver that copies rows from the HBase table {@code student}
 * into the table {@code student_mr}, using {@code H2HMapper} to emit
 * {@link Put}s and {@code H2HReducer} to write them.
 *
 * <p>Run via {@link ToolRunner} so generic Hadoop options (-D, -conf, ...)
 * are parsed into the job {@link Configuration}.
 */
public class H2HDriver extends Configured implements Tool {

    /**
     * Configures and submits the table-to-table copy job, blocking until
     * it finishes.
     *
     * @param strings command-line arguments (unused)
     * @return 0 on success, 1 on failure
     * @throws Exception if job setup or submission fails
     */
    @Override
    public int run(String[] strings) throws Exception {
        // Obtain the configuration injected by ToolRunner and point it
        // at the ZooKeeper quorum that fronts the HBase cluster.
        Configuration conf = this.getConf();
        conf.set("hbase.zookeeper.quorum", "bigdata211");

        // Create the job and record the jar containing this driver;
        // without setJarByClass the mapper/reducer classes cannot be
        // located when the job runs on a distributed cluster.
        Job job = Job.getInstance(conf, this.getClass().getSimpleName());
        job.setJarByClass(this.getClass());

        // Full-table scan tuned for MapReduce: batch rows per RPC and
        // skip the block cache, since each block is read exactly once.
        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false);

        // Mapper: read from the source table "student".
        TableMapReduceUtil.initTableMapperJob(
                   "student",
                   scan,
                   H2HMapper.class,
                   ImmutableBytesWritable.class,
                   Put.class,
                   job);

        // Reducer: write into the target table "student_mr".
        TableMapReduceUtil.initTableReducerJob(
                "student_mr",
                H2HReducer.class,
                job);

        // Submit the job and wait for completion.
        job.setNumReduceTasks(1);
        boolean isSuccess = job.waitForCompletion(true);

        System.out.println(isSuccess?"成功":"失败");
        return  isSuccess?0:1;
    }

    /**
     * Entry point: delegates to {@link ToolRunner} so generic options are
     * applied, then exits with the job's status code.
     *
     * @param args command-line arguments forwarded to {@link #run(String[])}
     * @throws Exception if the tool fails to run
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        int run = ToolRunner.run(conf, new H2HDriver(), args);
        System.exit(run);
    }
}
