package hbase.mapreduce;

/**
 * Created by spark on 16-10-8.
 */

import hbase.HBaseUtil;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.util.ArrayList;
import java.util.List;

/**
 * Hadoop {@link Tool} that (re)creates the HBase tables used by the
 * word-count MapReduce job and loads a small set of sample rows.
 *
 * <p>Side effects: drops and recreates the "word" table (column family
 * "content") with five sample sentences, and drops and recreates the empty
 * "stat" output table (column family "result").
 *
 * @author Luxh
 */
public class InitData extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new InitData(), args);
        System.exit(exitCode);
    }

    /**
     * Recreates the input/output tables and inserts the sample data.
     *
     * @param args command-line arguments (unused)
     * @return 0 on success
     * @throws Exception if any HBase operation fails
     */
    @Override
    public int run(String[] args) throws Exception {

        // try-with-resources guarantees the connection is closed even when an
        // intermediate HBase call throws (the original leaked it on failure).
        try (Connection conn = ConnectionFactory.createConnection(getConf())) {

            // Drop any previous "word" table, then recreate it with a single
            // column family named "content".
            HBaseUtil.deleteTable(conn, "word");
            HBaseUtil.createTable(conn, "word", "content");

            // Table is Closeable in the HBase 1.0+ client API, so it can be
            // managed by try-with-resources as well.
            try (Table htable = HBaseUtil.getHTable(conn, "word")) {
                // Sample rows: row keys "1".."5", family "content", no qualifier.
                List<Put> puts = new ArrayList<>();
                puts.add(HBaseUtil.getPut("1", "content", null, "The Apache Hadoop software library is a framework"));
                puts.add(HBaseUtil.getPut("2", "content", null, "The common utilities that support the other Hadoop modules"));
                puts.add(HBaseUtil.getPut("3", "content", null, "Hadoop by reading the documentation"));
                puts.add(HBaseUtil.getPut("4", "content", null, "Hadoop from the release page"));
                puts.add(HBaseUtil.getPut("5", "content", null, "Hadoop on the mailing list"));

                // Submit all sample rows in one batch.
                htable.put(puts);
            }

            // Recreate the empty "stat" output table with a single column
            // family named "result".
            HBaseUtil.deleteTable(conn, "stat");
            HBaseUtil.createTable(conn, "stat", "result");
        }

        return 0;
    }
}
