package com.hiscene.structurestream

import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.{Put, Table}
import org.apache.spark.sql.ForeachWriter

/** Structured Streaming sink that writes each [[DeviceData]] record as a row
  * into an HBase table. Spark calls open() once per partition/epoch, process()
  * once per record, and close() when the partition is done (or failed).
  */
class HbaseForeachWriter extends ForeachWriter[DeviceData] {

  import org.apache.hadoop.hbase.util.Bytes

  // Name of the target HBase table.
  private val hbaseTableName: String = "hi"

  // Column family / qualifiers encoded once per writer instance instead of
  // re-encoding the same constant strings for every record in process().
  private val columnFamily: Array[Byte] = Bytes.toBytes("info")
  private val eidQualifier: Array[Byte] = Bytes.toBytes("eid")
  // NOTE(review): "diviceId" looks like a typo for "deviceId", but it is both
  // the DeviceData field name and the on-disk column qualifier — renaming it
  // here would silently write to a different column and break existing
  // readers, so it is intentionally kept as-is.
  private val diviceIdQualifier: Array[Byte] = Bytes.toBytes("diviceId")
  private val eventInfoQualifier: Array[Byte] = Bytes.toBytes("eventInfo")

  // Table handle acquired in open() and released in close(); `_` is the
  // idiomatic Scala default (null) for an uninitialized var.
  private var table: Table = _

  /** Acquires a Table handle from the shared connection pool for this
    * partition/epoch. Returning true tells Spark to proceed with process()
    * for every row of the partition.
    */
  override def open(partitionId: Long, version: Long): Boolean = {
    table = HbaseConnectionPool.getConnection.getTable(TableName.valueOf(hbaseTableName))
    true
  }

  /** Writes one record to HBase, keyed by the record's eid. */
  override def process(device: DeviceData): Unit = {
    val put = new Put(Bytes.toBytes(device.eid))
    put.addColumn(columnFamily, eidQualifier, Bytes.toBytes(device.eid))
    put.addColumn(columnFamily, diviceIdQualifier, Bytes.toBytes(device.diviceId))
    put.addColumn(columnFamily, eventInfoQualifier, Bytes.toBytes(device.eventInfo))
    table.put(put)
  }

  /** Releases the table handle; the pooled connection itself stays open for
    * reuse by other partitions. `table` may still be null if open() failed.
    */
  override def close(errorOrNull: Throwable): Unit = {
    if (table != null) {
      table.close()
    }
  }
}
