package com.lkr.flink.sink

import java.nio.charset.StandardCharsets

import com.lkr.flink.tools.SnowflakeIdUtil
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.hadoop.conf
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Delete, Put, Table}
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}

/**
 * Flink sink that writes each incoming String record into an HBase table.
 *
 * Each record is stored under a freshly generated snowflake id as the row
 * key, in column family "cf1", qualifier "c1". One HBase connection and one
 * Table handle are opened per parallel subtask in `open()` and released in
 * `close()`.
 *
 * @param zkQuorum     comma-separated ZooKeeper quorum used by the HBase client
 * @param zkClientPort ZooKeeper client port
 * @param tableName    fully qualified HBase table name ("namespace:table")
 */
class MyHBaseSinkFunction(
    zkQuorum: String = "node01,node02,node03",
    zkClientPort: String = "2181",
    tableName: String = "flink:data_orders"
) extends RichSinkFunction[String] {

  // Initialised in open(); the RichSinkFunction lifecycle guarantees open()
  // runs before invoke() on each parallel subtask, so these are never read
  // while still null during normal operation.
  var connection: Connection = _
  var hbTable: Table = _

  /** Opens the HBase connection and resolves the target table once per subtask. */
  override def open(parameters: Configuration): Unit = {
    val configuration: conf.Configuration = HBaseConfiguration.create()
    configuration.set("hbase.zookeeper.quorum", zkQuorum)
    configuration.set("hbase.zookeeper.property.clientPort", zkClientPort)
    connection = ConnectionFactory.createConnection(configuration)
    hbTable = connection.getTable(TableName.valueOf(tableName))
  }

  /** Releases the table handle first, then the connection that owns it. */
  override def close(): Unit = {
    if (hbTable != null) {
      hbTable.close()
    }
    if (connection != null) {
      connection.close()
    }
  }

  /**
   * Inserts one record keyed by a newly generated snowflake id.
   *
   * UTF-8 is used explicitly so the stored bytes do not depend on the JVM's
   * platform default charset.
   *
   * @param hbTable  table to write into
   * @param orderObj record payload stored in cf1:c1
   */
  def insertHBase(hbTable: Table, orderObj: String): Unit = {
    val key = SnowflakeIdUtil.getDidID(SnowflakeIdUtil.DCD_SNOWFLAKE)
    val put = new Put(key.toString.getBytes(StandardCharsets.UTF_8))
    put.addColumn(
      "cf1".getBytes(StandardCharsets.UTF_8),
      "c1".getBytes(StandardCharsets.UTF_8),
      orderObj.getBytes(StandardCharsets.UTF_8)
    )
    hbTable.put(put)
  }

  /**
   * Deletes the row whose key is the UTF-8 byte representation of `orderObj`.
   *
   * @param hbTable  table to delete from
   * @param orderObj row key of the record to remove
   */
  def deleteHBaseData(hbTable: Table, orderObj: String): Unit = {
    val delete = new Delete(orderObj.getBytes(StandardCharsets.UTF_8))
    hbTable.delete(delete)
  }

  /** Called by the Flink runtime once per record; inserts it into HBase. */
  override def invoke(value: String, context: SinkFunction.Context[_]): Unit = {
    insertHBase(hbTable, value)
  }
}
