package spark.person

import java.time.LocalDateTime

import org.apache.spark.SparkConf
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SparkSession}
import utils.JedisUtil

/**
  * Created by zhangbn on 2018/10/25.
  */
object RedisPidVid2Hive {

  /**
    * Copies the Redis hash "&lt;date&gt;_pid_vid" (pass_id -> uuid) into the Hive
    * table ods.pid_vid under partition dt=&lt;date&gt;.
    *
    * @param args args(0) = date string, used both as the Redis key prefix and as
    *             the Hive partition value; args(1) = target DataFrame partition count
    */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: RedisPidVid2Hive <date> <numPartitions>")

    val startTime: Long = System.currentTimeMillis
    println(s"[${LocalDateTime.now()}]  RedisPidVid2Hive作业启动.......................  now_ms=${startTime}")

    val sparkConf = new SparkConf()
      .setAppName("RedisPidVid2Hive")
    sparkConf.set("spark.sql.crossJoin.enabled", "true")

    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    try {
      spark.sql("use ods")

      // Pull the whole hash for this date; key layout is "<date>_pid_vid".
      val jedis = JedisUtil.getJedis
      val rows: List[Row] =
        try {
          import scala.collection.JavaConverters._
          // Convert the Java map in one pass. The previous implementation
          // appended to an immutable List with :+ inside a loop, which is
          // O(n) per append and O(n^2) overall for a large hash.
          jedis.hgetAll(args(0) + "_pid_vid").asScala
            .map { case (passId, uuid) => Row(passId, uuid) }
            .toList
        } finally {
          // Always release the Redis connection, even if hgetAll fails.
          jedis.close()
        }

      val pvSchema = StructType(
        Seq(
          StructField("pass_id", StringType, true),
          StructField("uuid", StringType, true)
        )
      )

      val pvDF = spark.createDataFrame(spark.sparkContext.parallelize(rows), pvSchema)
      println("DF原来的分区数为：" + pvDF.rdd.partitions.size)
      val pvRepartitioned = pvDF.repartition(args(1).toInt)
      println("DF重分区数后：" + pvRepartitioned.rdd.partitions.size)
      pvRepartitioned.createOrReplaceTempView("pv_DF")

      spark.sql(
        """
          |create table IF NOT EXISTS ods.pid_vid(
          |pass_id string,
          |uuid string
          |)partitioned by (dt string) STORED AS parquet TBLPROPERTIES('parquet.compression'='SNAPPY')
        """.stripMargin)

      spark.sql(s"INSERT INTO TABLE ods.pid_vid PARTITION(dt='${args(0)}') SELECT * from pv_DF")
    } finally {
      // Release cluster resources whether the job succeeded or threw.
      spark.stop()
    }

    val endTime: Long = System.currentTimeMillis
    println(s"[${LocalDateTime.now()}]  RedisPidVid2Hive作业结束.......................  now_ms=${endTime}")
  }
}