package com.lab.hbase.labAPI;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/*
 * Initializes the test tables and seed data for the HBase MapReduce job.
 */
public class HbaseInitTable {

	// HBase client configuration, built from hbase-site.xml on the classpath.
	static Configuration config = null;
	// Shared connection; opened in init(), released in close().
	private Connection connection = null;
	private Table table = null;

	/**
	 * Drops and recreates the MR input/output tables, then seeds the input
	 * table with sample rows. Table names and the column family come from
	 * the project-local HBaseMr class.
	 */
	public static void main(String[] args) {
		HbaseInitTable hit = new HbaseInitTable();
		try {
			// Open the shared connection.
			hit.init();
			// Drop stale copies of both tables, if present.
			hit.a1_deleteTable(TableName.valueOf(HBaseMr.tableName));
			hit.a1_deleteTable(TableName.valueOf(HBaseMr.tableName2));
			// Recreate them with the shared column family.
			hit.a2_createTable(TableName.valueOf(HBaseMr.tableName), HBaseMr.colf);
			hit.a2_createTable(TableName.valueOf(HBaseMr.tableName2), HBaseMr.colf);
			// Seed the input table and scan it back as a sanity check.
			hit.insertData();
		} catch (MasterNotRunningException e) {
			e.printStackTrace();
		} catch (ZooKeeperConnectionException e) {
			e.printStackTrace();
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// Release the connection even when a setup step fails midway;
			// the original version leaked it on any exception.
			try {
				hit.close();
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Creates the HBase configuration (read from hbase-site.xml by default)
	 * and opens the shared connection used by all other methods.
	 */
	@Before
	public void init() throws Exception {
		System.out.println("##连接初始化");
		config = HBaseConfiguration.create();
		System.out.println("配置文件：" + config);

		connection = ConnectionFactory.createConnection(config);
		System.out.println("连接是否关闭： " + connection.isClosed());
	}

	/**
	 * Deletes the given table if it exists (HBase requires disabling first).
	 *
	 * NOTE(review): the original @Test annotation was removed — JUnit 4
	 * rejects @Test methods that declare parameters, which made the whole
	 * test class fail with an initialization error.
	 *
	 * @param tn table to drop
	 */
	@SuppressWarnings("deprecation")
	public void a1_deleteTable(TableName tn) throws MasterNotRunningException, ZooKeeperConnectionException, Exception {
		System.out.println("=======delete table " + tn.toString());
		// try-with-resources guarantees the Admin is closed even if
		// disable/delete throws (the original leaked it on that path).
		try (Admin admin = connection.getAdmin()) {
			if (admin.tableExists(tn)) {
				admin.disableTable(tn);
				admin.deleteTable(tn);
			}
		}
	}

	/**
	 * Creates a table with a single column family.
	 *
	 * NOTE(review): @Test removed for the same reason as a1_deleteTable —
	 * JUnit 4 fails @Test methods that take parameters.
	 *
	 * @param tableName table to create
	 * @param colf      name of the (single) column family
	 */
	public void a2_createTable(TableName tableName, String colf) throws Exception {
		System.out.println("=====create table " + tableName.toString());
		try (Admin admin = connection.getAdmin()) {
			// HTableDescriptor/HColumnDescriptor are deprecated in newer
			// client APIs but kept here to match the file's HBase version.
			HTableDescriptor desc = new HTableDescriptor(tableName);
			desc.addFamily(new HColumnDescriptor(colf));
			admin.createTable(desc);
		}
	}

	/**
	 * Seeds the input table with five sample rows (rowkeys "1".."5"), then
	 * scans them back and prints each row to verify the write.
	 */
	@Test
	public void insertData() throws Exception {
		System.out.println("======insert data to table.");
		table = connection.getTable(TableName.valueOf(HBaseMr.tableName));
		System.out.println("获取表：" + table);

		List<Put> puts = new ArrayList<>();
		puts.add(makePut("1", "The Apache Hadoop software library is a framework"));
		puts.add(makePut("2", "The common utilities that support the other Hadoop modules"));
		puts.add(makePut("3", "Hadoop by reading the documentation"));
		puts.add(makePut("4", "Hadoop from the release page"));
		puts.add(makePut("5", "Hadoop on the mailing list"));
		table.put(puts);

		System.out.println("");
		System.out.println("=== scan data");
		Scan scan = new Scan();
		scan.setLimit(10);
		// ResultScanner holds server-side resources; close it deterministically
		// (the original never closed it).
		try (ResultScanner scanner = table.getScanner(scan)) {
			for (Result result : scanner) {
				System.out.print("rowkey：" + Bytes.toString(result.getRow()) + "  ");
				System.out.print(HBaseMr.col + ":" + Bytes.toString(
						result.getValue(Bytes.toBytes(HBaseMr.colf), Bytes.toBytes(HBaseMr.col))) + " | ");
				System.out.println("");
			}
		}

		table.close();
	}

	/**
	 * Builds a Put for one row with a single cell in the shared column family.
	 * Uses Bytes.toBytes (always UTF-8) instead of String.getBytes(), which
	 * depends on the platform default charset.
	 */
	private Put makePut(String rowKey, String value) {
		Put p = new Put(Bytes.toBytes(rowKey));
		p.addColumn(Bytes.toBytes(HBaseMr.colf), Bytes.toBytes(HBaseMr.col), Bytes.toBytes(value));
		return p;
	}

	/**
	 * Closes the shared connection.
	 */
	@After
	public void close() throws Exception {
		System.out.println("=====close table and connection on hbase.");
		// Guard against a failed init() leaving connection null (close() is
		// now also reached from main's finally block).
		if (connection != null) {
			connection.close();
		}
		System.out.println("********************************************");
		System.out.println("********************************************");
		System.out.println("");
		System.out.println("");
	}
}
