package com.yege.sink;

import com.yege.bean.Student;
import com.yege.table.HTableRow;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.util.Map;
import java.util.Set;

/**
 * Flink sink that writes {@link HTableRow} records into an HBase table using a
 * {@link BufferedMutator} for batched, more efficient inserts.
 *
 * <p>Rows are buffered and flushed every {@code batch} mutations; any remainder
 * is flushed when the sink is closed.
 */
public class HBaseSinkFunction extends RichSinkFunction<HTableRow> {

    /** Default number of buffered mutations before an explicit flush. */
    private static final int DEFAULT_BATCH_SIZE = 100;

    // Runtime-only resources created in open(); transient because the sink
    // function itself is serialized and shipped to task managers by Flink.
    private transient Connection conn;
    private transient BufferedMutator mutator;

    /** Column family to write every qualifier into, as UTF-8 bytes. */
    private final byte[] cfBytes;
    /** Name of the target HBase table. */
    private final String tableNameStr;
    /** Flush threshold: number of mutations buffered before forcing a flush. */
    private final int batch;
    /** Mutations buffered since the last explicit flush. */
    private int count;

    /**
     * Creates a sink with the default batch size of {@value #DEFAULT_BATCH_SIZE}.
     *
     * @param tableName target HBase table name
     * @param cFamily   column family all qualifiers are written into
     */
    public HBaseSinkFunction(String tableName, String cFamily) {
        this(tableName, cFamily, DEFAULT_BATCH_SIZE);
    }

    /**
     * Creates a sink with an explicit batch size.
     *
     * @param tableName target HBase table name
     * @param cFamily   column family all qualifiers are written into
     * @param batch     number of mutations to buffer before flushing
     */
    public HBaseSinkFunction(String tableName, String cFamily, int batch) {
        this.tableNameStr = tableName;
        this.cfBytes = Bytes.toBytes(cFamily);
        this.batch = batch;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
        // TODO(review): ZooKeeper quorum is hard-coded to "hadoop10"; consider
        // passing it via the constructor or Flink job configuration.
        conf.set("hbase.zookeeper.quorum", "hadoop10");
        conn = ConnectionFactory.createConnection(conf);
        TableName tableName = TableName.valueOf(tableNameStr);

        // BufferedMutator batches puts client-side for more efficient inserts.
        BufferedMutatorParams params = new BufferedMutatorParams(tableName);
        params.maxKeyValueSize(10485760);       // 10 MB max single KeyValue
        params.writeBufferSize(1024 * 1024);    // 1 MB client write buffer
        mutator = conn.getBufferedMutator(params);
        count = 0;
    }

    @Override
    public void close() throws Exception {
        super.close();
        try {
            if (mutator != null) {
                // Flush any mutations still buffered below the batch threshold;
                // without this, up to batch-1 rows would be silently dropped.
                mutator.flush();
                mutator.close();
            }
        } finally {
            if (conn != null) {
                conn.close();
            }
        }
    }

    @Override
    public void invoke(HTableRow hTableRow, Context context) throws Exception {
        Put put = new Put(hTableRow.getRowkey());
        for (Map.Entry<String, byte[]> entry : hTableRow.getMap().entrySet()) {
            // Use the column family configured in the constructor (previously
            // hard-coded to "cf1", silently ignoring cfBytes) and encode the
            // qualifier as UTF-8 via Bytes.toBytes instead of the platform
            // default charset.
            put.addColumn(cfBytes, Bytes.toBytes(entry.getKey()), entry.getValue());
        }

        mutator.mutate(put);
        count++;
        if (count >= batch) {
            mutator.flush();
            count = 0;
        }
    }
}
