package com.shujia.hbase.api;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

public class LoadIncrementalHFileToHBase {
    /**
     * Bulk-loads pre-generated HFiles from HDFS path {@code /data/hfile}
     * into the HBase table {@code stu3}.
     *
     * <p>Run on the cluster with: {@code hadoop jar <jar> <fully-qualified-class-name>}
     * (or plain {@code java} with the HBase classpath available).
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the ZooKeeper connection or the bulk load fails
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // ZooKeeper quorum used to locate the HBase cluster.
        // BUG FIX: the original value had a '.' instead of ',' between
        // "node3:2181" and "node1:2181", yielding the bogus host
        // "node3:2181.node1" and dropping node1 from the quorum.
        conf.set("hbase.zookeeper.quorum", "node2:2181,node3:2181,node1:2181");

        // try-with-resources guarantees the table connection is closed even
        // when doBulkLoad throws (the original leaked the HTable).
        try (HTable table = new HTable(conf, "stu3")) {
            LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);

            // Moves the HFiles under /data/hfile into the table's region
            // directories and registers them with the serving region servers.
            loader.doBulkLoad(new Path("/data/hfile"), table);
        }
    }
}
