package net.bwie.flink.utils;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Custom Flink sink that writes each element of a {@code DataStream<Tuple2<String, String>>}
 * into an HBase table. The tuple's {@code f0} is used as the RowKey and {@code f1} is a JSON
 * object string whose values are mapped onto the configured columns.
 *
 * <p>Only columns listed in {@code columnNames} that are present (non-null) in the JSON are
 * written; records that contain none of the configured columns are skipped entirely, because
 * HBase rejects an empty {@link Put} with an {@link IllegalArgumentException}.
 *
 * @author xuanyu
 * @date 2025/10/31
 */
public class HbaseSinkFunction extends RichSinkFunction<Tuple2<String, String>> {

	/** Default ZooKeeper quorum / client port used by the legacy constructor. */
	private static final String DEFAULT_ZK_QUORUM = "node101,node102,node103";
	private static final String DEFAULT_ZK_PORT = "2181";

	/** Target HBase table name. */
	private final String tableName;
	/** Column family that all configured columns belong to. */
	private final String familyName;
	/** Column names extracted from the comma-separated list, trimmed, blanks removed. */
	private final String[] columns;
	/** ZooKeeper quorum and client port used to reach the HBase cluster. */
	private final String zkQuorum;
	private final String zkPort;

	/**
	 * Creates a sink using the default ZooKeeper connection settings.
	 *
	 * @param tableName   HBase table to write to
	 * @param familyName  column family for all columns
	 * @param columnNames comma-separated list of JSON keys / HBase qualifiers
	 */
	public HbaseSinkFunction(String tableName, String familyName, String columnNames) {
		this(tableName, familyName, columnNames, DEFAULT_ZK_QUORUM, DEFAULT_ZK_PORT);
	}

	/**
	 * Creates a sink with explicit ZooKeeper connection settings.
	 *
	 * @param tableName   HBase table to write to
	 * @param familyName  column family for all columns
	 * @param columnNames comma-separated list of JSON keys / HBase qualifiers
	 * @param zkQuorum    comma-separated ZooKeeper hosts
	 * @param zkPort      ZooKeeper client port
	 */
	public HbaseSinkFunction(String tableName, String familyName, String columnNames,
			String zkQuorum, String zkPort) {
		this.tableName = tableName;
		this.familyName = familyName;
		this.zkQuorum = zkQuorum;
		this.zkPort = zkPort;
		// Split the column list once up front instead of on every invoke(); trim entries
		// so "a, b" resolves the JSON key "b" rather than " b", and drop blank entries.
		String[] rawNames = columnNames.split(",");
		int count = 0;
		for (String rawName : rawNames) {
			if (!rawName.trim().isEmpty()) {
				count++;
			}
		}
		this.columns = new String[count];
		int index = 0;
		for (String rawName : rawNames) {
			String trimmed = rawName.trim();
			if (!trimmed.isEmpty()) {
				this.columns[index++] = trimmed;
			}
		}
	}

	/** HBase connection and table handle, created in {@link #open} and released in {@link #close}. */
	private Connection connection = null;
	private Table table = null;
	/** Pre-encoded column family bytes, computed once in {@link #open}. */
	private byte[] familyBytes = null;

	@Override
	public void open(Configuration parameters) throws Exception {
		// 1. Build the HBase client configuration, pointing at the ZooKeeper ensemble.
		org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
		conf.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, zkQuorum);
		conf.set(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, zkPort);
		// 2. Open the (heavyweight, per-subtask) Connection.
		connection = ConnectionFactory.createConnection(conf);
		// 3. Obtain the Table handle for writes.
		table = connection.getTable(TableName.valueOf(tableName));
		// Encode the family name once; it is identical for every record.
		familyBytes = Bytes.toBytes(familyName);
	}

	@Override
	public void invoke(Tuple2<String, String> value, Context context) throws Exception {
		// Row key comes from f0; column values from the JSON payload in f1.
		Put put = new Put(Bytes.toBytes(value.f0));
		JSONObject jsonObject = JSON.parseObject(value.f1);
		for (String columnName : columns) {
			Object columnValue = jsonObject.get(columnName);
			// Only write columns actually present (non-null) in this record's JSON.
			if (null != columnValue) {
				put.addColumn(
					familyBytes, Bytes.toBytes(columnName), Bytes.toBytes(columnValue.toString())
				);
			}
		}
		// HBase throws IllegalArgumentException for an empty Put, so skip records
		// that matched none of the configured columns instead of failing the job.
		if (!put.isEmpty()) {
			table.put(put);
		}
	}

	@Override
	public void close() throws Exception {
		// Release the Table before its owning Connection.
		if (null != table) {
			table.close();
		}
		if (null != connection) {
			connection.close();
		}
	}

}
