package cn.doitedu.dataservice

import java.util

import com.alibaba.fastjson.JSON
import com.google.gson.Gson
import org.apache.commons.lang3.time.DateFormatUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.client.ConnectionFactory
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles
import org.apache.hadoop.hbase.{HBaseConfiguration, KeyValue, TableName}
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer

/**
 * Bulk-loads one day's session aggregates from Hive into HBase via HFiles.
 *
 * The target table must be created in HBase beforehand:
 *
 *   hbase >  create 'session_view','f'
 *
 * Two HFile layouts are provided:
 *   - [[genHfile]]:     one HBase cell per session field
 *   - [[genHfileJson]]: the whole session serialized as one JSON cell
 * [[main]] generates the JSON layout and bulk-loads it.
 */
object SessionViewDataBulkLoader {

  // Target HBase table / column family (table must pre-exist, see scaladoc above).
  private val SessionViewTable = "session_view"
  private val Family = "f"

  /**
   * Builds the shared Hadoop configuration pointing at the cluster's
   * HDFS namenode and HBase zookeeper quorum.
   */
  private def buildConf(): Configuration = {
    val conf = HBaseConfiguration.create()
    conf.set("fs.defaultFS", "hdfs://doitedu01:8020")
    conf.set("hbase.zookeeper.quorum", "doitedu01:2181,doitedu02:2181,doitedu03:2181")
    conf
  }

  /**
   * Configures the MapReduce job for [[HFileOutputFormat2]] against the target
   * table and writes the sorted KeyValue RDD out as HFiles under `outputPath`.
   *
   * NOTE(review): the output path must not already exist on HDFS, or
   * saveAsNewAPIHadoopFile will fail — confirm cleanup policy with the caller.
   *
   * @param keyValueData KeyValues sorted by (rowkey, family, qualifier), as
   *                     required by the HFile format
   * @param outputPath   HDFS directory for the generated HFiles
   */
  private def writeHfiles(keyValueData: RDD[(ImmutableBytesWritable, KeyValue)],
                          outputPath: String): Unit = {
    val conf = buildConf()
    val job = Job.getInstance(conf)

    val conn = ConnectionFactory.createConnection(conf)
    try {
      val table = conn.getTable(TableName.valueOf(SessionViewTable))
      val locator = conn.getRegionLocator(TableName.valueOf(SessionViewTable))
      try {
        // Sets hbase.mapreduce.hfileoutputformat.table.name, the total-order
        // partitioner, compression, etc. from the live table's metadata.
        HFileOutputFormat2.configureIncrementalLoad(job, table, locator)

        keyValueData.saveAsNewAPIHadoopFile(
          outputPath,
          classOf[ImmutableBytesWritable],
          classOf[KeyValue],
          classOf[HFileOutputFormat2],
          job.getConfiguration)
      } finally {
        // FIX: Table/RegionLocator were previously never closed (resource leak).
        locator.close()
        table.close()
      }
    } finally {
      conn.close()
    }
  }

  /**
   * Layout 1: each session field becomes its own cell.
   *
   * Rowkey = yyyyMMdd(start_time) + session_id; family 'f'; one qualifier per
   * field, values stored as strings.
   *
   * Source table `dws18.traffic_aggr_session` schema:
   *   guid string, session_id string, start_time bigint, end_time bigint,
   *   in_page string, out_page string, pv_cnt bigint, isnew int,
   *   hour_segment int, province string, city string, district string,
   *   device_type string, release_channel string, app_version string,
   *   os_name string, dt string
   */
  def genHfile(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("session可视化数据导入hbase")
      .enableHiveSupport()
      .master("local[*]")
      .getOrCreate()

    // Read one day's partition of the Hive session aggregate table.
    // NOTE(review): the date is hard-coded; presumably it should come from args.
    val sessionTable = spark.read.table("dws18.traffic_aggr_session").where("dt = '2020-12-11'")

    // Flatten each row into (rowkey, (family, qualifier, value)) tuples.
    val tmp: RDD[(String, (String, String, String))] = sessionTable.rdd.flatMap(row => {
      val session_id = row.getAs[String]("session_id")
      val start_time = row.getAs[Long]("start_time")
      val end_time = row.getAs[Long]("end_time")
      val in_page = row.getAs[String]("in_page")
      val out_page = row.getAs[String]("out_page")
      val pv_cnt = row.getAs[Long]("pv_cnt")
      val isnew = row.getAs[Int]("isnew")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val device_type = row.getAs[String]("device_type")
      val os_name = row.getAs[String]("os_name")

      // Rowkey is day prefix + session id, so all cells of one session sort together.
      val rowkey = DateFormatUtils.format(start_time, "yyyyMMdd") + session_id

      // FIX: the session duration was previously stored under the misleading
      // qualifier "start_time"; renamed to "session_timelong" to match genHfileJson.
      // FIX: dropped redundant `.toString` on values that are already String
      // (getAs[String] can return null, on which `.toString` would NPE).
      Seq(
        (rowkey, (Family, "session_timelong", (end_time - start_time).toString)),
        (rowkey, (Family, "in_page", in_page)),
        (rowkey, (Family, "out_page", out_page)),
        (rowkey, (Family, "pv_cnt", pv_cnt.toString)),
        (rowkey, (Family, "isnew", isnew.toString)),
        (rowkey, (Family, "province", province)),
        (rowkey, (Family, "city", city)),
        (rowkey, (Family, "device_type", device_type)),
        (rowkey, (Family, "os_name", os_name))
      )
    })

    // HFiles require cells sorted by rowkey, then family, then qualifier.
    val sorted = tmp.sortBy(tp => (tp._1, tp._2._1, tp._2._2))

    // Convert to the (ImmutableBytesWritable, KeyValue) pairs HFileOutputFormat2 expects.
    val keyValueData: RDD[(ImmutableBytesWritable, KeyValue)] = sorted.map(kv => {
      val rowkey = new ImmutableBytesWritable(kv._1.getBytes())
      val value = new KeyValue(kv._1.getBytes(), Family.getBytes(), kv._2._2.getBytes(), kv._2._3.getBytes())
      (rowkey, value)
    })

    writeHfiles(keyValueData, "/dataservice/session_view/tmp/")

    spark.close()
  }

  /**
   * Layout 2: the whole session is serialized as a single JSON cell
   * (family 'f', qualifier 'q'); rowkey = yyyyMMdd(start_time) + session_id.
   *
   * Source table schema: see [[genHfile]].
   */
  def genHfileJson(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("session可视化数据导入hbase")
      .enableHiveSupport()
      .master("local[*]")
      .getOrCreate()

    // Read one day's partition of the Hive session aggregate table.
    // NOTE(review): the date is hard-coded; presumably it should come from args.
    val sessionTable = spark.read.table("dws18.traffic_aggr_session").where("dt = '2020-12-11'")

    // Serialize each row to one (rowkey, json) pair.
    val tmp = sessionTable.rdd.map(row => {
      val session_id = row.getAs[String]("session_id")
      val start_time = row.getAs[Long]("start_time")
      val end_time = row.getAs[Long]("end_time")
      val in_page = row.getAs[String]("in_page")
      val out_page = row.getAs[String]("out_page")
      val pv_cnt = row.getAs[Long]("pv_cnt")
      val isnew = row.getAs[Int]("isnew")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val device_type = row.getAs[String]("device_type")
      val os_name = row.getAs[String]("os_name")

      // FIX: dropped redundant `.toString` on already-String fields; a null
      // field no longer NPEs — Gson simply omits null map values by default.
      val mp = new util.HashMap[String, String]()
      mp.put("session_timelong", (end_time - start_time).toString)
      mp.put("in_page", in_page)
      mp.put("out_page", out_page)
      mp.put("isnew", isnew.toString)
      mp.put("province", province)
      mp.put("city", city)
      mp.put("device_type", device_type)
      mp.put("os_name", os_name)
      mp.put("session_id", session_id)

      val json = new Gson().toJson(mp)

      val dtStr = DateFormatUtils.format(start_time, "yyyyMMdd")
      (dtStr + session_id, json)
    })

    // HFiles require cells sorted by rowkey (single family/qualifier here).
    val sorted = tmp.sortBy(tp => tp._1)

    // Convert to the (ImmutableBytesWritable, KeyValue) pairs HFileOutputFormat2 expects.
    val keyValueData: RDD[(ImmutableBytesWritable, KeyValue)] = sorted.map(kv => {
      val rowkey = new ImmutableBytesWritable(kv._1.getBytes())
      val value = new KeyValue(kv._1.getBytes, Family.getBytes, "q".getBytes, kv._2.getBytes)
      (rowkey, value)
    })

    writeHfiles(keyValueData, "/dataservice/session_view/tmp2/")

    spark.close()
  }

  /**
   * Entry point: generates the JSON-layout HFiles, then bulk-loads them into
   * the pre-created 'session_view' table.
   */
  def main(args: Array[String]): Unit = {

    // Step 1: generate the HFiles on HDFS.
    genHfileJson(args)

    // Step 2: hand the generated HFiles over to the region servers.
    val conf = buildConf()
    val conn = ConnectionFactory.createConnection(conf)
    try {
      val admin = conn.getAdmin()
      val table = conn.getTable(TableName.valueOf(SessionViewTable))
      val locator = conn.getRegionLocator(TableName.valueOf(SessionViewTable))
      try {
        val loader = new LoadIncrementalHFiles(conf)
        loader.doBulkLoad(new Path("/dataservice/session_view/tmp2/"), admin, table, locator)
      } finally {
        // FIX: Admin/Table/RegionLocator were previously never closed.
        locator.close()
        table.close()
        admin.close()
      }
    } finally {
      conn.close()
    }
  }

}
