package spark.person

import java.time.LocalDateTime
import java.util

import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Row, SparkSession}
import utils.HBaseUtil

/**
  * Created by zhangbn on 2018/10/24.
  */
object BuildPersonBaseHBase {

  /** Target HBase table (namespace:qualifier) that receives one row per deduplicated person. */
  private val TableNameStr = "bigdata:person-repo"

  /**
   * Entry point. Reads pass records from Hive (`ods.pid_vid` joined with
   * `ods.t_person_passinfo`), keeps only the highest-quality capture per `uuid`
   * (row_number over quality_score desc), and bulk-writes the survivors to HBase.
   *
   * @param args unused (an earlier variant filtered by `dt=args(0)`; the live SQL scans all partitions)
   */
  def main(args: Array[String]): Unit = {

    val startTime = System.currentTimeMillis
    println(s"[${LocalDateTime.now()}]  personBase作业启动.......................  now_ms=${startTime}")

    val sparkConf = new SparkConf()
      .setAppName("personBase")
    // the join below is keyed, but Hive planning may still flag it — allow cross joins explicitly
    sparkConf.set("spark.sql.crossJoin.enabled", "true")

    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    try {
      spark.sqlContext.sql("use ods")

      // Deduplicate: rank each uuid's captures by quality_score (best first) and keep rank 1 only.
      val oriDF = spark.sql(
        s"""
           |select * from
           |(select view.*,row_number() over (partition by view.uuid order by view.quality_score desc) rank
           | from (select a.uuid,b.*
           |   from pid_vid a
           |   join t_person_passinfo b
           |   on a.pass_id=b.pass_id) view) super_view
           |where rank<=1""".stripMargin
      )

      // Spread the write across 100 tasks; each partition is flushed to HBase as one batch.
      oriDF.rdd.repartition(100).foreachPartition(partitionsFun)

      println("Save to HBase OK...")
    } finally {
      // Always release the SparkSession, even if the job fails mid-write.
      spark.stop()
    }

    val endTime = System.currentTimeMillis
    println(s"[${LocalDateTime.now()}]  personBase作业结束.......................  now_ms=${endTime}")

  }

  /**
   * Converts every [[Row]] in one partition to an HBase [[Put]] (rowkey = `uuid`,
   * column family `info`) and writes the whole batch in a single `table.put` call.
   *
   * String columns are encoded with `Bytes.toBytes` (always UTF-8) so they match
   * the encoding of the numeric columns; `String.getBytes` would depend on the
   * JVM's platform default charset.
   */
  def partitionsFun(iter: Iterator[Row]): Unit = {
    val cf = Bytes.toBytes("info") // hoisted: reused for every column of every row

    val lists = new util.ArrayList[Put]
    while (iter.hasNext) {
      val next = iter.next()
      val rowkey = next.getAs[String]("uuid")
      val put = new Put(Bytes.toBytes(rowkey))
      put.addColumn(cf, Bytes.toBytes("pass_id"), Bytes.toBytes(next.getAs[String]("pass_id")))
      put.addColumn(cf, Bytes.toBytes("pass_time"), Bytes.toBytes(next.getAs[String]("pass_time")))
      put.addColumn(cf, Bytes.toBytes("camera_id"), Bytes.toBytes(next.getAs[String]("camera_id")))
      put.addColumn(cf, Bytes.toBytes("camera_code"), Bytes.toBytes(next.getAs[Long]("camera_code")))
      put.addColumn(cf, Bytes.toBytes("device_id"), Bytes.toBytes(next.getAs[String]("device_id")))
      put.addColumn(cf, Bytes.toBytes("server_id"), Bytes.toBytes(next.getAs[String]("server_id")))
      put.addColumn(cf, Bytes.toBytes("face_img_url"), Bytes.toBytes(next.getAs[String]("face_img_url")))
      put.addColumn(cf, Bytes.toBytes("environ_img_url"), Bytes.toBytes(next.getAs[String]("environ_img_url")))
      put.addColumn(cf, Bytes.toBytes("quality_score"), Bytes.toBytes(next.getAs[Double]("quality_score")))
      put.addColumn(cf, Bytes.toBytes("age"), Bytes.toBytes(next.getAs[Int]("age")))
      put.addColumn(cf, Bytes.toBytes("gender"), Bytes.toBytes(next.getAs[Int]("gender")))
      put.addColumn(cf, Bytes.toBytes("attractive"), Bytes.toBytes(next.getAs[Int]("attractive")))
      put.addColumn(cf, Bytes.toBytes("eyeglass"), Bytes.toBytes(next.getAs[Int]("eyeglass")))
      put.addColumn(cf, Bytes.toBytes("sunglass"), Bytes.toBytes(next.getAs[Int]("sunglass")))
      put.addColumn(cf, Bytes.toBytes("smile"), Bytes.toBytes(next.getAs[Int]("smile")))
      put.addColumn(cf, Bytes.toBytes("mask"), Bytes.toBytes(next.getAs[Int]("mask")))
      put.addColumn(cf, Bytes.toBytes("race"), Bytes.toBytes(next.getAs[Int]("race")))
      put.addColumn(cf, Bytes.toBytes("eyeopen"), Bytes.toBytes(next.getAs[Int]("eyeopen")))
      put.addColumn(cf, Bytes.toBytes("mouthopen"), Bytes.toBytes(next.getAs[Int]("mouthopen")))
      put.addColumn(cf, Bytes.toBytes("beard"), Bytes.toBytes(next.getAs[Int]("beard")))
      put.addColumn(cf, Bytes.toBytes("feature"), Bytes.toBytes(next.getAs[String]("feature")))
      put.addColumn(cf, Bytes.toBytes("create_time"), Bytes.toBytes(next.getAs[String]("create_time")))
      lists.add(put)
    }

    // Repartitioning to 100 tasks can produce empty partitions; skip the HBase round-trip for them.
    if (!lists.isEmpty) {
      val table = HBaseUtil.connection.getTable(TableName.valueOf(TableNameStr.getBytes()))
      try
        table.put(lists)
      catch {
        // Bug fix: the old code concatenated `ex.printStackTrace()` (Unit) into the
        // message, printing "save to HBase ERROR：()". Log the message, then the trace.
        case ex: Exception =>
          println("save to HBase ERROR：" + ex.getMessage)
          ex.printStackTrace()
      } finally table.close()
    }
  }

}

