package cn.doitedu.hbase.day02;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;

import java.io.IOException;

/**
 * Bulk-loads pre-built HFiles from a given HDFS directory into an HBase table.
 * The load moves (not copies) the files into the table's region data
 * directories, adding the required metadata as it goes — so no data is
 * rewritten and the operation is fast regardless of file size.
 */
public class HFileBulkLoader {

    /**
     * Connects to HBase and bulk-loads the HFiles under
     * {@code hdfs://node-1.51doit.cn:9000/hfile_out2} into table
     * {@code tb_student}, printing the elapsed time in milliseconds.
     *
     * @param args unused
     * @throws IOException if the connection or the bulk load fails
     */
    public static void main(String[] args) throws IOException {

        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = HBaseConfiguration.create();
        TableName tableName = TableName.valueOf("tb_student");

        // try-with-resources guarantees the connection, admin, table and
        // region locator are closed even if doBulkLoad throws; the original
        // code leaked all four on any failure.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin();
             Table table = conn.getTable(tableName);
             RegionLocator locator = conn.getRegionLocator(tableName)) {

            // NOTE(review): LoadIncrementalHFiles is deprecated since HBase 2.2;
            // the replacement is the already-imported BulkLoadHFiles:
            //   BulkLoadHFiles.create(conf)
            //       .bulkLoad(tableName, new Path("hdfs://node-1.51doit.cn:9000/hfile_out2"));
            LoadIncrementalHFiles loadIncrementalHFiles = new LoadIncrementalHFiles(conf);

            long start = System.currentTimeMillis();
            loadIncrementalHFiles.doBulkLoad(
                    new Path("hdfs://node-1.51doit.cn:9000/hfile_out2"), admin, table, locator);
            long end = System.currentTimeMillis();
            System.out.println("导入总耗时(ms)： "  + (end-start));
        }
    }
}