package com.shengzai.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;


/**
 * Bulk-loads student rows from a local CSV file into the HBase table
 * {@code api_test:api_student}.
 *
 * Each input line is expected as: rowKey,name,age,sex,clazz,score.
 * Columns name/age/sex/clazz go to family {@code students_info};
 * score goes to family {@code students_score}.
 */
public class PutListData {

    /** Number of Puts buffered before a single batched table.put() RPC. */
    private static final int BATCH_SIZE = 1000;

    public static void main(String[] args) throws IOException {

        // HBaseConfiguration.create() loads hbase-default.xml/hbase-site.xml;
        // a bare `new Configuration()` lacks the HBase client defaults and
        // fails ConnectionFactory's version check.
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "master,node1,node2");

        long start = System.currentTimeMillis();
        int count = 0;

        // try-with-resources guarantees reader, table and connection are
        // closed even if an insert fails partway through.
        try (BufferedReader reader = Files.newBufferedReader(
                     Paths.get("hadoop_code/src/data/student_join.txt"),
                     StandardCharsets.UTF_8);
             Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("api_test:api_student"))) {

            List<Put> batch = new ArrayList<>(BATCH_SIZE);
            String line;
            while ((line = reader.readLine()) != null) {
                Put put = toPut(line);
                if (put == null) {
                    continue; // skip malformed lines instead of throwing AIOOBE
                }
                batch.add(put);
                count++;
                // Batched puts: one RPC per BATCH_SIZE rows instead of one per row.
                if (batch.size() >= BATCH_SIZE) {
                    table.put(batch);
                    batch.clear();
                    System.out.println("已插入" + count + "条数据");
                }
            }
            // Flush the final partial batch.
            if (!batch.isEmpty()) {
                table.put(batch);
            }
        }

        long end = System.currentTimeMillis();
        System.out.println("插入完毕,共" + count + "条,总用时:" + (end - start) / 1000.0 + "秒");
    }

    /**
     * Parses one CSV line into a Put, or returns {@code null} when the line
     * does not have the expected 6 fields.
     */
    private static Put toPut(String line) {
        String[] split = line.split(",");
        if (split.length < 6) {
            return null;
        }
        // Bytes.toBytes uses UTF-8 explicitly, unlike String.getBytes()
        // which depends on the platform default charset.
        Put put = new Put(Bytes.toBytes(split[0]));
        byte[] info = Bytes.toBytes("students_info");
        byte[] score = Bytes.toBytes("students_score");
        // addColumn replaces the Put.add(byte[],byte[],byte[]) overload
        // that was deprecated in 1.x and removed in HBase 2.x.
        put.addColumn(info, Bytes.toBytes("name"), Bytes.toBytes(split[1]));
        put.addColumn(info, Bytes.toBytes("age"), Bytes.toBytes(split[2]));
        put.addColumn(info, Bytes.toBytes("sex"), Bytes.toBytes(split[3]));
        put.addColumn(info, Bytes.toBytes("clazz"), Bytes.toBytes(split[4]));
        put.addColumn(score, Bytes.toBytes("score"), Bytes.toBytes(split[5]));
        return put;
    }
}
