package hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

/**
 * Created by spark on 16-10-8.
 */
/**
 * Static helper methods for common HBase administrative and data operations:
 * creating and deleting tables, obtaining {@link Table} handles, and building
 * {@link Put} mutations.
 *
 * <p>All methods operate on a caller-supplied {@link Connection}; this class
 * never owns or closes the connection itself.
 *
 * Created by spark on 16-10-8.
 */
public class HBaseUtil {

    /** Utility class — not meant to be instantiated. */
    private HBaseUtil() {
    }

    /**
     * Creates a table with the given column families.
     *
     * @param conn      an open HBase connection (not closed by this method)
     * @param tableName name of the table to create
     * @param families  one or more column family names
     * @throws IOException if the table cannot be created (e.g. it already exists)
     */
    public static void createTable(Connection conn, String tableName, String... families) throws IOException {
        // Admin is AutoCloseable; the original leaked it on every call.
        try (Admin admin = conn.getAdmin()) {
            HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
            for (String family : families) {
                tableDescriptor.addFamily(new HColumnDescriptor(family));
            }
            admin.createTable(tableDescriptor);
        }
    }

    /**
     * Returns a {@link Table} handle for the named table.
     *
     * <p>The caller is responsible for closing the returned table.
     *
     * @param conn      an open HBase connection
     * @param tableName name of the table to open
     * @return a lightweight table handle backed by {@code conn}
     * @throws IOException if the table handle cannot be obtained
     */
    public static Table getHTable(Connection conn, String tableName) throws IOException {
        return conn.getTable(TableName.valueOf(tableName));
    }

    /**
     * Builds a single-cell {@link Put} for the given row.
     *
     * @param rowKey  row key of the mutation
     * @param family  column family to write to
     * @param qualify column qualifier; if empty/null, the cell is written with a
     *                null qualifier (i.e. directly under the family)
     * @param value   cell value
     * @return the constructed {@link Put}
     */
    public static Put getPut(String rowKey, String family, String qualify, String value) {
        Put put = new Put(Bytes.toBytes(rowKey));

        if (StringUtils.isEmpty(qualify)) {
            put.addColumn(Bytes.toBytes(family), null, Bytes.toBytes(value));
        } else {
            put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualify), Bytes.toBytes(value));
        }

        return put;
    }

    /**
     * Disables (if needed) and deletes the named table, best-effort.
     *
     * <p>Keeps the original no-throw contract: failures are reported to stderr
     * instead of being propagated. A non-existent table is a no-op.
     *
     * @param conn      an open HBase connection (not closed by this method)
     * @param tableName name of the table to delete
     */
    public static void deleteTable(Connection conn, String tableName) {
        TableName table = TableName.valueOf(tableName);
        // Admin is AutoCloseable; the original leaked it on every call.
        try (Admin admin = conn.getAdmin()) {
            if (!admin.tableExists(table)) {
                return;
            }
            // disableTable throws if the table is already disabled; guard it.
            if (admin.isTableEnabled(table)) {
                admin.disableTable(table);
            }
            admin.deleteTable(table);
        } catch (IOException e) {
            // Original swallowed this silently; preserve the no-throw contract
            // but leave a trace so failures are diagnosable.
            System.err.println("Failed to delete table '" + tableName + "': " + e);
        }
    }
}
