package cn.doitedu.hbase.hfilemr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;

import java.io.IOException;

/**
 * @date: 2019/7/12
 * @site: www.doitedu.cn
 * @author: hunter.d
 * @qq: 657270652
 * @description:
 *
 * Bulk-loads pre-generated HFiles into an existing HBase table (it does NOT
 * generate the HFiles itself — only imports them).
 *
 * Equivalent to the shell command:
 *   bin/hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles &lt;hdfs://storefileoutput&gt; &lt;tablename&gt;
 */
public class LoadHFile {

    /**
     * Entry point. Optional CLI arguments (defaults preserve the original
     * hard-coded behavior):
     *   args[0] — HDFS directory containing the HFiles (default: hdfs://spark01:8020/json/output)
     *   args[1] — target HBase table name            (default: person)
     *
     * @throws IOException if the bulk load fails (includes TableNotFoundException
     *                     when the target table does not exist)
     */
    public static void main(String[] args) throws IOException {

        // Allow overriding the HFile directory and table name from the command
        // line, mirroring the shell-command usage documented above.
        String hfileDir  = args.length > 0 ? args[0] : "hdfs://spark01:8020/json/output";
        String tableName = args.length > 1 ? args[1] : "person";

        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "spark01:2181,spark02:2181");

        LoadIncrementalHFiles loadIncrementalHFiles = new LoadIncrementalHFiles(conf);

        // Connection, Admin, Table and RegionLocator are all Closeable;
        // try-with-resources guarantees they are released even if the bulk
        // load throws (the original leaked all four).
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin();
             Table table = conn.getTable(TableName.valueOf(tableName));
             RegionLocator regionLocator = conn.getRegionLocator(TableName.valueOf(tableName))) {

            loadIncrementalHFiles.doBulkLoad(new Path(hfileDir), admin, table, regionLocator);
        }
    }
}
