package com.wudl.flink.hbase.slink;

import com.wudl.flink.hbase.model.User;
import com.wudl.flink.hbase.utils.DateUtils;
import com.wudl.flink.hbase.utils.HbaseUtils;
import com.wudl.flink.hbase.utils.RowKeyUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.text.ParseException;


/**
 * Flink sink that writes {@link User} records into the HBase table
 * {@code wudluser} through a {@link BufferedMutator}.
 *
 * <p>Lifecycle: {@link #open} establishes the connection and mutator,
 * {@link #invoke} converts each record to a {@link Put} and writes it,
 * {@link #close} releases both resources.
 *
 * @author wudl
 * @version 1.0
 * @since 2021-12-08
 */
public class HbaseSink extends RichSinkFunction<User> {
    private static final Logger logger = LoggerFactory.getLogger(HbaseSink.class);

    /** Target HBase table name. */
    private final String tableName = "wudluser";

    private Connection conn = null;
    private BufferedMutator mutator = null;

    /**
     * Opens the HBase connection and creates the buffered mutator.
     *
     * <p>Buffered writes are flushed to HBase when either threshold is hit,
     * similar in spirit to Kafka producer batching.
     *
     * @param parameters Flink configuration passed by the runtime
     * @throws Exception if the connection or mutator cannot be created;
     *         failing fast here avoids a confusing NPE on the first invoke()
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(tableName));
        // Flush when the write buffer reaches 10 MB ...
        params.writeBufferSize(10 * 1024 * 1024L);
        // ... or at the latest every 5 seconds.
        params.setWriteBufferPeriodicFlushTimeoutMs(5 * 1000L);
        conn = HbaseUtils.getConnect();
        try {
            mutator = conn.getBufferedMutator(params);
        } catch (IOException e) {
            // Log with the full throwable, then propagate: swallowing here
            // would leave 'mutator' null and break every subsequent invoke().
            logger.error("当前获取bufferedMutator 失败：{}", e.getMessage(), e);
            throw e;
        }
    }

    /**
     * Writes one record to HBase.
     *
     * @param value   the record to persist
     * @param context Flink sink context (unused)
     * @throws Exception if conversion or the HBase write fails; rethrown so
     *         the Flink runtime can fail/retry instead of silently dropping data
     */
    @Override
    public void invoke(User value, Context context) throws Exception {
        try {
            Put put = setDataSourcePut(value);
            mutator.mutate(put);
            // NOTE(review): flushing on every record defeats the 10 MB / 5 s
            // buffering configured in open(); kept to preserve the original
            // per-record durability semantics.
            mutator.flush();
        } catch (Exception ex) {
            logger.error("写入到hbase失败：{}", ex.getMessage(), ex);
            throw ex;
        }
    }

    /**
     * Releases the mutator (which flushes any remaining buffered puts)
     * and the HBase connection.
     */
    @Override
    public void close() throws Exception {
        HbaseUtils.close(mutator, conn);
    }

    /**
     * Builds one {@link Put} for the given record: row key from
     * {@link RowKeyUtils}, all columns under family {@code cf}, plus an
     * {@code updateDate} column stamped with the current time.
     *
     * @param user the record to convert
     * @return a fully populated {@link Put}
     * @throws ParseException if row-key generation fails to parse a date
     */
    private Put setDataSourcePut(User user) throws ParseException {
        byte[] rowKey = RowKeyUtils.getRowkey(user);
        // Hoist the column-family bytes instead of re-encoding per column.
        byte[] cf = Bytes.toBytes("cf");
        Put put = new Put(rowKey);
        put.addColumn(cf, Bytes.toBytes("id"), Bytes.toBytes(String.valueOf(user.getId())));
        put.addColumn(cf, Bytes.toBytes("name"), Bytes.toBytes(user.getName()));
        put.addColumn(cf, Bytes.toBytes("age"), Bytes.toBytes(String.valueOf(user.getAge())));
        put.addColumn(cf, Bytes.toBytes("address"), Bytes.toBytes(user.getAddress()));
        put.addColumn(cf, Bytes.toBytes("createTime"), Bytes.toBytes(user.getCreateTime()));
        put.addColumn(cf, Bytes.toBytes("updateDate"), Bytes.toBytes(DateUtils.getCurrentTime()));
        return put;
    }

}
