package com.persagy.energy.sink

import scala.util.control.NonFatal

import com.persagy.energy.utils.HbaseUtil
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.{Connection, Put, Table}
import org.apache.hadoop.hbase.util.Bytes

/**
 * Flink sink that writes `(rowKey, value)` string pairs into the HBase
 * table `energy_data`, column family `data`, qualifier `value`.
 *
 * Lifecycle: `open` acquires the HBase connection and table handle once
 * per task; `invoke` issues one `Put` per record; `close` releases the
 * table and connection.
 */
class HbaseSink extends RichSinkFunction[(String, String)]{

  // Target table / column coordinates (see HBase schema for energy data).
  val energy_tableName: String = "energy_data"
  val family: String = "data"
  val value: String = "value"

  // Initialized in open(); Flink serializes the function, so these must
  // be created per-task rather than in the constructor.
  var conn: Connection = _
  var energyTable: Table = _

  override def open(parameters: Configuration): Unit = {
    println("create Hbase connection ......")
    conn = HbaseUtil.getHbaseConnection()

    println("create Hbase table ......")
    energyTable = conn.getTable(TableName.valueOf(energy_tableName))
  }

  /**
   * Writes one record: `v._1` is the row key, `v._2` the cell value.
   */
  override def invoke(v: (String, String), context: SinkFunction.Context[_]): Unit = {
    val rowKey: String = v._1
    val put = new Put(Bytes.toBytes(rowKey))
    put.addColumn(Bytes.toBytes(family), Bytes.toBytes(value), Bytes.toBytes(v._2))
    energyTable.put(put)
    // Log the table *name*, not the Table object's toString.
    println(s"insert into $energy_tableName rowKey = $rowKey")
  }

  override def close(): Unit = {
    try {
      // Nested try/finally: conn.close() must run even if the table
      // close throws, otherwise the connection leaks.
      try {
        if (energyTable != null) {
          energyTable.close()
        }
      } finally {
        if (conn != null) {
          conn.close()
        }
      }
      println("closed Hbase connection")
    } catch {
      // NonFatal: let OOM / InterruptedException etc. propagate.
      case NonFatal(e) => System.err.println(e.getMessage)
    }
  }
}
