package com.example.realtimeeb.dim

import java.util

import scala.collection.JavaConverters._
import scala.util.control.NonFatal

import org.apache.flink.configuration
import org.apache.flink.streaming.api.functions.source.{RichSourceFunction, SourceFunction}
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.{Connection, Result, ResultScanner, Scan, Table}
import org.apache.hadoop.hbase.util.Bytes

import com.example.realtimeeb.utils.HBaseUtils

/**
 * Flink source that performs a full scan of the HBase table `lagou_area`
 * (column family `f1`) and emits one record per row as
 * (rowKey, value1-value2-...-valueN), where the values are the cell values
 * of the row joined with "-" in the order returned by `Result.listCells()`.
 *
 * Lifecycle: `open()` acquires the HBase connection/table, `run()` scans and
 * emits, `cancel()` asks the scan loop to stop, `close()` releases resources.
 */
class HbaseReader extends RichSourceFunction[(String, String)] {

  // Initialized in open(), released in close().
  private var conn: Connection = _
  private var table: Table = _
  private var scan: Scan = _

  // cancel() is called from a different thread than run(); volatile makes
  // the stop request visible to the scan loop (standard SourceFunction pattern).
  @volatile private var running: Boolean = true

  /**
   * Acquires the HBase connection and prepares a full-table scan restricted
   * to column family "f1".
   */
  override def open(parameters: configuration.Configuration): Unit = {
    conn = HBaseUtils.getConnection()
    table = conn.getTable(TableName.valueOf("lagou_area"))
    scan = new Scan()
    scan.addFamily(Bytes.toBytes("f1"))
  }

  /**
   * Scans the whole table and forwards each row downstream.
   *
   * @param ctx Flink source context used to emit (rowKey, joinedValues) tuples.
   */
  override def run(ctx: SourceFunction.SourceContext[(String, String)]): Unit = {
    // ResultScanner holds server-side resources; it must be closed even if
    // the loop throws or the source is cancelled mid-scan.
    val rs: ResultScanner = table.getScanner(scan)
    try {
      val iterator: util.Iterator[Result] = rs.iterator()
      while (running && iterator.hasNext) {
        val result: Result = iterator.next()
        val rowKey: String = Bytes.toString(result.getRow)
        // mkString handles the empty-cell-list case safely (yields ""),
        // unlike manually trimming a trailing separator.
        val valueString: String = result.listCells().asScala
          .map(cell => Bytes.toString(cell.getValueArray, cell.getValueOffset, cell.getValueLength))
          .mkString("-")
        ctx.collect((rowKey, valueString))
      }
    } finally {
      rs.close()
    }
  }

  /** Requests the run() loop to stop at the next row boundary. */
  override def cancel(): Unit = {
    running = false
  }

  /** Releases the HBase table and connection; best-effort, errors are logged. */
  override def close(): Unit = {
    try {
      if (table != null) {
        table.close()
      }
      if (conn != null) {
        conn.close()
      }
    } catch {
      // NonFatal so fatal errors (OOM, interrupts) still propagate.
      case NonFatal(e) => println(e.getMessage)
    }
  }
}
