package com.shujia.base;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

public class Code12PutMore {
    /** Number of Puts buffered before each batch write to HBase. */
    private static final int BATCH_SIZE = 100;

    static Connection connection;

    static {
        // The HBase client locates the cluster through ZooKeeper, so the
        // quorum address is the only mandatory connection setting here.
        Configuration conf = new Configuration();
        conf.set("hbase.zookeeper.quorum", "node1,node2,master");

        try {
            connection = ConnectionFactory.createConnection(conf);
        } catch (IOException e) {
            // Fail fast: swallowing the exception would leave `connection`
            // null and only surface later as a confusing NPE in main().
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * Loads 1000 student records from {@code data/student1000.txt} into the
     * HBase table {@code api:tb1}, batching writes in groups of
     * {@link #BATCH_SIZE} rows for efficiency.
     *
     * <p>Input line format (comma-separated):
     * {@code id,name,age,gender,clazz}, e.g.
     * {@code 1500100001,施笑槐,22,女,文科六班}.
     *
     * <p>Table layout: column family {@code info1} holds name/age/gender,
     * column family {@code info2} holds clazz; the row key is the student id.
     *
     * @throws IOException if the input file cannot be read or an HBase write fails
     */
    public static void main(String[] args) throws IOException {
        TableName tableName = TableName.valueOf("api:tb1");

        // try-with-resources closes the table and the reader even when an
        // exception is thrown. The data file contains Chinese text, so read
        // it explicitly as UTF-8 instead of the platform default charset.
        try (Table table = connection.getTable(tableName);
             BufferedReader bufferedReader = Files.newBufferedReader(
                     Paths.get("data/student1000.txt"), StandardCharsets.UTF_8)) {

            // Writing one row per RPC is slow; buffer BATCH_SIZE rows and
            // submit them with a single table.put(List) call.
            List<Put> puts = new ArrayList<>(BATCH_SIZE);

            String line;
            while ((line = bufferedReader.readLine()) != null) {
                String[] columns = line.split(",");

                // Row key = student id (columns[0]).
                Put put = new Put(columns[0].getBytes(StandardCharsets.UTF_8));
                // addColumn mutates the Put in place; no reassignment needed.
                put.addColumn(
                        "info1".getBytes(StandardCharsets.UTF_8),
                        "name".getBytes(StandardCharsets.UTF_8),
                        columns[1].getBytes(StandardCharsets.UTF_8));
                put.addColumn(
                        "info1".getBytes(StandardCharsets.UTF_8),
                        "age".getBytes(StandardCharsets.UTF_8),
                        Bytes.toBytes(Integer.parseInt(columns[2])));
                put.addColumn(
                        "info1".getBytes(StandardCharsets.UTF_8),
                        "gender".getBytes(StandardCharsets.UTF_8),
                        columns[3].getBytes(StandardCharsets.UTF_8));
                put.addColumn(
                        "info2".getBytes(StandardCharsets.UTF_8),
                        "clazz".getBytes(StandardCharsets.UTF_8),
                        columns[4].getBytes(StandardCharsets.UTF_8));

                puts.add(put);
                if (puts.size() >= BATCH_SIZE) {
                    // Flush a full batch to HBase and start a new one.
                    table.put(puts);
                    puts.clear();
                }
            }

            // Flush any remaining partial batch (skip the call when empty).
            if (!puts.isEmpty()) {
                table.put(puts);
            }
        } finally {
            connection.close();
        }
    }
}
