package com.example.yckjbigdataflink.sink;

import com.example.yckjbigdataflink.MainFlinkApp;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import org.apache.hadoop.hbase.util.Bytes;

/**
 * Flink sink that applies CDC-style row operations (INSERT / UPDATE / DELETE)
 * to an HBase table. The row key is the record id; values go to the
 * {@code info} column family. Unknown or null op codes are silently ignored.
 *
 * <p>Not intended for subclassing beyond the Flink lifecycle contract.
 */
public class HBaseUpdateSink extends RichSinkFunction<MainFlinkApp.MyRow> {

    /** HBase connection endpoint and target table (fixed for this job). */
    private static final String ZK_QUORUM = "zk1,zk2,zk3";
    private static final String TABLE_NAME = "my_hbase_table";

    // Pre-encoded column family / qualifiers: avoids re-encoding the same
    // constant strings on every invoke() call.
    private static final byte[] CF_INFO = Bytes.toBytes("info");
    private static final byte[] COL_NAME = Bytes.toBytes("name");
    private static final byte[] COL_AGE = Bytes.toBytes("age");

    // transient: Flink serializes the sink instance for distribution; the
    // live connection/table are recreated per task in open().
    private transient Connection connection;
    private transient Table table;

    /**
     * Opens the HBase connection and table handle for this task instance.
     *
     * @param parameters Flink configuration (unused here)
     * @throws Exception if the HBase connection cannot be established
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", ZK_QUORUM);
        connection = ConnectionFactory.createConnection(config);
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
    }

    /**
     * Applies one change record to HBase.
     *
     * @param record the change event; {@code getOp()} selects the action
     * @param context Flink sink context (unused)
     * @throws Exception on any HBase write failure
     */
    @Override
    public void invoke(MainFlinkApp.MyRow record, Context context) throws Exception {
        String op = record.getOp();
        if (op == null) {
            return; // matches original behavior: no-op for null/unknown ops
        }
        switch (op) {
            case "INSERT": {
                Put put = new Put(Bytes.toBytes(record.getId()));
                put.addColumn(CF_INFO, COL_NAME, Bytes.toBytes(record.getName()));
                put.addColumn(CF_INFO, COL_AGE, Bytes.toBytes(record.getAge()));
                table.put(put);
                break;
            }
            case "UPDATE": {
                // In HBase an update is just an overwrite (Put).
                // NOTE(review): only the "age" column is rewritten here, not
                // "name" — confirm whether partial update is intentional.
                Put put = new Put(Bytes.toBytes(record.getId()));
                put.addColumn(CF_INFO, COL_AGE, Bytes.toBytes(record.getAge()));
                table.put(put);
                break;
            }
            case "DELETE":
                table.delete(new Delete(Bytes.toBytes(record.getId())));
                break;
            default:
                // Unknown op codes are ignored, same as the original chain.
                break;
        }
    }

    /**
     * Releases HBase resources. try/finally guarantees the connection is
     * closed even if closing the table throws (the original leaked the
     * connection in that case).
     *
     * @throws Exception propagated from table/connection close
     */
    @Override
    public void close() throws Exception {
        try {
            if (table != null) {
                table.close();
            }
        } finally {
            if (connection != null) {
                connection.close();
            }
        }
    }
}
