package com.xx.sparkhbase

import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.client.ConnectionFactory
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
import org.apache.hadoop.hbase.spark.{HBaseContext, KeyFamilyQualifier}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.spark.sql.{Row, SparkSession}

/**
 * Bulk-loads the result of a Hive query into HBase: HFiles are generated with
 * the hbase-spark connector (`hbaseBulkLoad`) and then handed to
 * `LoadIncrementalHFiles` to be moved into the live table regions.
 * Intended to run against the "safe" cluster.
 *
 * @author tzp
 * @since 2022/6/20
 */
object BulkLoad {
  // Column qualifiers written for each row; each is stored under family `family`.
  val cols = Array(
    "user_no",
    "mobile_no",
    "deviceid",
    "id_type",
    "is_ps",
    "is_p18",
    "is_p24",
    "is_p36",
    "is_p18_peipao",
    "is_p24_peipao",
    "is_p36_peipao",
    "is_big",
    "sx_amt",
    "max_duotou",
    "product_code",
    "dt",
    "media",
    "jf_risk_price",
    "jf_risk_price_peipao",
    "sc_risk_price"
  )
  // Target HBase table and HDFS staging directory for the generated HFiles.
  val tableName = "dataeco-test"
  val stagingFolder = "/home/hdp-dataeco-data/hbasetest3"
  // Single column family "e"; every qualifier in `cols` lives under it.
  val family: Array[Byte] = Bytes.toBytes("e")

  /**
   * Converts one DataFrame row into (rowkey/family/qualifier, value) cells
   * for HFile generation.
   *
   * Columns whose value is SQL NULL are skipped rather than written as cells.
   *
   * @param r input row; must contain every column in `cols`, and `user_no`
   *          (used as the HBase row key) is assumed non-null — a null
   *          `user_no` would NPE in `Bytes.toBytes`
   * @return one entry per non-null column of the row
   */
  def rowToPuts(r: Row): Iterator[(KeyFamilyQualifier, Array[Byte])] = {
    val rk = Bytes.toBytes(r.getAs[String]("user_no"))
    // Option(..) maps SQL NULLs to None so null columns are dropped directly,
    // instead of building an array of null tuples and filtering afterwards.
    cols.iterator.flatMap { col =>
      Option(r.get(r.fieldIndex(col))).map { v =>
        (new KeyFamilyQualifier(rk, family, Bytes.toBytes(col)), Bytes.toBytes(v.toString))
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .getOrCreate()
    val sc = spark.sparkContext

    try {
      val df = spark.sql(
        """select
          | *
          | from hdp_credit.oot_wj_sx_v2
          |""".stripMargin)

      import org.apache.hadoop.hbase.spark.HBaseRDDFunctions.GenericHBaseRDDFunctions
      val config = HBaseConfiguration.create()
      //    config.addResource()
      val hc = new HBaseContext(sc, config)

      // Stage HFiles under `stagingFolder`, one cell per non-null column.
      df.rdd.hbaseBulkLoad(hc, TableName.valueOf(tableName), rowToPuts, stagingFolder)

      // Hand the generated HFiles to the region servers. Connection/Table/Admin
      // are closed in reverse order (the original leaked all three).
      val load = new LoadIncrementalHFiles(config)
      val conn = ConnectionFactory.createConnection(config)
      try {
        val admin = conn.getAdmin
        val table = conn.getTable(TableName.valueOf(tableName))
        try {
          load.doBulkLoad(new Path(stagingFolder), admin, table,
            conn.getRegionLocator(TableName.valueOf(tableName)))
        } finally {
          table.close()
          admin.close()
        }
      } finally {
        conn.close()
      }

      //    val loadTool = BulkLoadHFiles.create(config)
      //    loadTool.bulkLoad(TableName.valueOf(tableName), new Path(stagingFolder))
    } finally {
      // Release the Spark application's resources (original never stopped it).
      spark.stop()
    }
  }
}
