package com.xx.sparkhbase

import java.util

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession}

import scala.util.control.NonFatal

/**
 * Reads one day's partition of device-level features from Hive
 * (dp_data_db.feature_all_device_dm_001) and bulk-writes them to the
 * HBase table "fe2" using RDD.saveAsNewAPIHadoopDataset.
 *
 * @author tzp
 * @since 2022/6/20
 */
object UserFeature3 {

  /** Sample record layout left over from example code; not used by the job itself. */
  case class Employee(key: String, fName: String, lName: String,
                      mName: String, addressLine: String, city: String,
                      state: String, zipCode: String)

  // Sample rows left over from example code; not referenced by main(). Kept for
  // backward compatibility in case other code references UserFeature3.data.
  val data = Seq(
    Employee("1", "Abby", "Smith", "K", "3456 maint", "Orlando", "FL", "45235"),
    Employee("2", "Amaya", "Williams", "L", "123 Orange", "Newark", "NJ", "27656"),
    Employee("3", "Alchemy", "Davis", "P", "Warners", "Sanjose", "CA", "34789")
  )

  // Feature columns selected from dp_data_db.feature_all_device_dm_001; each name
  // becomes an HBase column qualifier under the column family `cf`.
  val cols = Array(
    "id_type",
    "dm_0012_tt_slotid_request_times_sum_7d",
    "dm_0012_tt_slotid_request_times_sum_30d",
    "dm_0012_tt_slotid_show_times_sum_7d",
    "dm_0012_tt_slotid_show_times_sum_30d",
    "dm_0012_tt_ctype_show_times_sum_7d",
    "dm_0012_tt_ctype_show_times_sum_30d",
    "dm_0012_tt_csite_show_times_sum_7d",
    "dm_0012_tt_csite_show_times_sum_30d",
    "dm_0012_tt_slotid_click_times_sum_7d",
    "dm_0012_tt_slotid_click_times_sum_30d",
    "dm_0012_tt_ctype_click_times_sum_7d",
    "dm_0012_tt_ctype_click_times_sum_30d",
    "dm_0012_tt_csite_click_times_sum_7d",
    "dm_0012_tt_csite_click_times_sum_30d",
    "dm_0012_ks_slotid_show_times_sum_7d",
    "dm_0012_ks_slotid_show_times_sum_30d",
    "dm_0012_ks_csite_show_times_sum_7d",
    "dm_0012_ks_csite_show_times_sum_30d",
    "dm_0012_ks_slotid_click_times_sum_7d",
    "dm_0012_ks_slotid_click_times_sum_30d",
    "dm_0012_ks_csite_click_times_sum_7d",
    "dm_0012_ks_csite_click_times_sum_30d",
    "dm_0012_ua",
    "dm_0012_geo",
    "dm_0012_model",
    "dm_0012_follow_days",
    "dm_0012_bind_days",
    "dm_0012_user_nos",
    "dm_0012_cancel_days",
    "dm_0012_follow_state",
    "dm_0012_tencent_csite_show_times_sum_7d",
    "dm_0012_tencent_csite_show_times_sum_30d",
    "dm_0012_tencent_csite_click_times_sum_7d",
    "dm_0012_tencent_csite_click_times_sum_30d",
    "dm_0012_geo_show_times_sum_7d",
    "dm_0012_geo_show_times_sum_30d",
    "dm_0012_ip_show_times_sum_7d",
    "dm_0012_ip_show_times_sum_30d",
    "dm_0012_model_show_times_sum_7d",
    "dm_0012_model_show_times_sum_30d",
    "dm_0012_tt_bid_avg_7d",
    "dm_0012_tt_bid_avg_30d",
    "dm_0008_os",
    "dm_0008_is_register",
    "dm_0008_register_date",
    "dm_0008_is_apply",
    "dm_0008_min_appl_date",
    "dm_0008_max_appl_date",
    "dm_0008_is_ps",
    "dm_0008_min_ps_date",
    "dm_0008_max_ps_date",
    "dm_0008_request_count_1d",
    "dm_0008_put_count_1d",
    "dm_0008_tt_request_count_1d",
    "dm_0008_tt_put_count_1d",
    "dm_0008_tencent_request_count_1d",
    "dm_0008_tencent_put_count_1d",
    "dm_0008_ks_request_count_1d",
    "dm_0008_ks_put_count_1d",
    "dm_0008_imp_count_1d",
    "dm_0008_clk_count_1d",
    "dm_0008_tt_imp_count_1d",
    "dm_0008_tt_clk_count_1d",
    "dm_0008_tt_rta_imp_count_1d",
    "dm_0008_tt_rta_clk_count_1d",
    "dm_0008_tencent_imp_count_1d",
    "dm_0008_tencent_clk_count_1d",
    "dm_0008_tencent_rta_imp_count_1d",
    "dm_0008_tencent_rta_clk_count_1d",
    "dm_0008_ks_imp_count_1d",
    "dm_0008_ks_clk_count_1d",
    "dm_0008_ks_rta_imp_count_1d",
    "dm_0008_ks_rta_clk_count_1d",
    "dm_0008_request_count_3d",
    "dm_0008_put_count_3d",
    "dm_0008_tt_request_count_3d",
    "dm_0008_tt_put_count_3d",
    "dm_0008_tencent_request_count_3d",
    "dm_0008_tencent_put_count_3d",
    "dm_0008_ks_request_count_3d",
    "dm_0008_ks_put_count_3d",
    "dm_0008_imp_count_3d",
    "dm_0008_clk_count_3d",
    "dm_0008_tt_imp_count_3d",
    "dm_0008_tt_clk_count_3d",
    "dm_0008_tt_rta_imp_count_3d",
    "dm_0008_tt_rta_clk_count_3d",
    "dm_0008_tencent_imp_count_3d",
    "dm_0008_tencent_clk_count_3d",
    "dm_0008_tencent_rta_imp_count_3d",
    "dm_0008_tencent_rta_clk_count_3d",
    "dm_0008_ks_imp_count_3d",
    "dm_0008_ks_clk_count_3d",
    "dm_0008_ks_rta_imp_count_3d",
    "dm_0008_ks_rta_clk_count_3d",
    "dm_0008_request_count_1w",
    "dm_0008_put_count_1w",
    "dm_0008_tt_request_count_1w",
    "dm_0008_tt_put_count_1w",
    "dm_0008_tencent_request_count_1w",
    "dm_0008_tencent_put_count_1w",
    "dm_0008_ks_request_count_1w",
    "dm_0008_ks_put_count_1w",
    "dm_0008_imp_count_1w",
    "dm_0008_clk_count_1w",
    "dm_0008_tt_imp_count_1w",
    "dm_0008_tt_clk_count_1w",
    "dm_0008_tt_rta_imp_count_1w",
    "dm_0008_tt_rta_clk_count_1w",
    "dm_0008_tencent_imp_count_1w",
    "dm_0008_tencent_clk_count_1w",
    "dm_0008_tencent_rta_imp_count_1w",
    "dm_0008_tencent_rta_clk_count_1w",
    "dm_0008_ks_imp_count_1w",
    "dm_0008_ks_clk_count_1w",
    "dm_0008_ks_rta_imp_count_1w",
    "dm_0008_ks_rta_clk_count_1w",
    "dm_0008_request_count_2w",
    "dm_0008_put_count_2w",
    "dm_0008_tt_request_count_2w",
    "dm_0008_tt_put_count_2w",
    "dm_0008_tencent_request_count_2w",
    "dm_0008_tencent_put_count_2w",
    "dm_0008_ks_request_count_2w",
    "dm_0008_ks_put_count_2w",
    "dm_0008_imp_count_2w",
    "dm_0008_clk_count_2w",
    "dm_0008_tt_imp_count_2w",
    "dm_0008_tt_clk_count_2w",
    "dm_0008_tt_rta_imp_count_2w",
    "dm_0008_tt_rta_clk_count_2w",
    "dm_0008_tencent_imp_count_2w",
    "dm_0008_tencent_clk_count_2w",
    "dm_0008_tencent_rta_imp_count_2w",
    "dm_0008_tencent_rta_clk_count_2w",
    "dm_0008_ks_imp_count_2w",
    "dm_0008_ks_clk_count_2w",
    "dm_0008_ks_rta_imp_count_2w",
    "dm_0008_ks_rta_clk_count_2w",
    "dm_0008_request_count_1m",
    "dm_0008_put_count_1m",
    "dm_0008_tt_request_count_1m",
    "dm_0008_tt_put_count_1m",
    "dm_0008_tencent_request_count_1m",
    "dm_0008_tencent_put_count_1m",
    "dm_0008_ks_request_count_1m",
    "dm_0008_ks_put_count_1m",
    "dm_0008_imp_count_1m",
    "dm_0008_clk_count_1m",
    "dm_0008_tt_imp_count_1m",
    "dm_0008_tt_clk_count_1m",
    "dm_0008_tt_rta_imp_count_1m",
    "dm_0008_tt_rta_clk_count_1m",
    "dm_0008_tencent_imp_count_1m",
    "dm_0008_tencent_clk_count_1m",
    "dm_0008_tencent_rta_imp_count_1m",
    "dm_0008_tencent_rta_clk_count_1m",
    "dm_0008_ks_imp_count_1m",
    "dm_0008_ks_clk_count_1m",
    "dm_0008_ks_rta_imp_count_1m",
    "dm_0008_ks_rta_clk_count_1m",
    "pday"
  )

  // Single HBase column family all qualifiers are written under.
  val cf = Bytes.toBytes("c")

  /**
   * Converts one DataFrame row into an HBase `Put` keyed by the row's `device_id`.
   * Columns whose value is null are skipped rather than written as empty cells.
   *
   * NOTE(review): if every column in `cols` is null for a row, the resulting Put
   * carries no cells — confirm the upstream query guarantees at least one non-null
   * feature per device, since HBase rejects empty Puts at write time.
   *
   * @param r a row containing `device_id` plus every column listed in `cols`
   * @return the (rowkey, Put) pair expected by TableOutputFormat
   */
  def rowToPuts(r: Row): (ImmutableBytesWritable, Put) = {
    val rowKey = Bytes.toBytes(r.getAs[String]("device_id"))
    val put = new Put(rowKey)
    cols.foreach { col =>
      // Option(...) maps a null cell to None, so null features are simply skipped.
      Option(r.get(r.fieldIndex(col))).foreach { value =>
        put.addColumn(cf, Bytes.toBytes(col), Bytes.toBytes(value.toString))
      }
    }
    (new ImmutableBytesWritable(rowKey), put)
  }

  /**
   * Entry point: reads the 20220526 partition of the device-feature table via
   * Spark SQL and writes it to the HBase table "fe2" with saveAsNewAPIHadoopDataset.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .getOrCreate()

    val df = spark.sql(
      """select
        | *
        | from dp_data_db.feature_all_device_dm_001 where pday='20220526' and device_id is not null
        |""".stripMargin)

    val prepareHBaseToLoad: RDD[(ImmutableBytesWritable, Put)] = df.rdd.map(rowToPuts)

    val tableName = "fe2"

    // A Hadoop Job is used only as a carrier for the output-format configuration;
    // it is never submitted to YARN.
    val conf: Configuration = HBaseConfiguration.create()
    val job: Job = Job.getInstance(conf)
    job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
    job.getConfiguration.set(TableOutputFormat.OUTPUT_TABLE, tableName)

    // Save the data to HBase.
    try {
      prepareHBaseToLoad.saveAsNewAPIHadoopDataset(job.getConfiguration)
    } catch {
      // NonFatal lets OutOfMemoryError and other fatal VM errors propagate instead
      // of being swallowed by the broad `Exception` match the original used.
      case NonFatal(e) =>
        // Known Spark 2.2 quirk: the write succeeds but saveAsNewAPIHadoopDataset
        // still throws IllegalArgumentException("Can not create a Path from a null
        // string"). Option(...) guards against a null message — the original
        // `e.getMessage.equals(...)` call could itself NPE here.
        if (Option(e.getMessage).exists(_.contains("Can not create a Path from a null string"))) {
          println(" saveAsNewAPIHadoopDataset - Exception caused due to a bug in spark 2.2 - Data is saved in HBASE but still excepton is thrown - java.lang.IllegalArgumentException: Can not create a Path from a null string at org.apache.hadoop.fs.Path.checkPathArg ")
        } else {
          throw e
        }
    }
  }
}
