package spark.person

import java.time.LocalDateTime
import java.util

import com.inf.featureCompare.FeatureCompare
import com.sun.jersey.core.util.Base64
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._
import utils.JedisUtil

/**
  * Created by zhangbn on 2018/10/22.
  */
object ClassifyPerson {

  /**
    * Spark job entry point: reads person pass records for one partition date,
    * runs a 1:N face-feature comparison for each record against the full
    * (broadcast) feature set, and groups records whose similarity exceeds a
    * threshold, persisting the grouping into Redis.
    *
    * Expected arguments:
    *   args(0) = dt partition value (also used as the Redis key prefix)
    *   args(1) = row limit for the source query
    *   args(2) = number of partitions to coalesce to before comparing
    *   args(3) = similarity threshold (double) for grouping
    */
  def main(args: Array[String]): Unit = {
    require(args.length >= 4,
      "usage: ClassifyPerson <dt> <limit> <numPartitions> <threshold>")

    val startTime: Long = System.currentTimeMillis
    println(s"[${LocalDateTime.now()}]  ClassifyPerson作业启动.......................  now_ms=${startTime}")

    val sparkConf = new SparkConf()
      .setAppName("ClassifyPerson")
    sparkConf.set("spark.sql.crossJoin.enabled", "true")

    val spark = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    spark.sqlContext.sql("use ods")
    // rn is a global row number (single-partition window, ordered by quality
    // score) so it serves as a stable integer id for every record in this run.
    // NOTE(review): args(0)/args(1) are interpolated directly into the SQL —
    // acceptable only because they come from the operator, not end users.
    val ori = spark.sql(
      s"""
        |select row_number() over(partition by 1 order by a.quality_score desc) as rn, a.pass_id, a.feature
        |from t_person_passinfo a
        |where dt=${args(0)} limit ${args(1)}""".stripMargin
    )
    println("读取数据后DF的分区数：" + ori.rdd.partitions.size)
    // NOTE(review): collecting to the driver and re-parallelizing pins the whole
    // dataset in driver memory; kept as-is because the limit (args(1)) bounds it.
    val rowArr = ori.collect()
    val rowRdd = spark.sparkContext.parallelize(rowArr)
    val schema = StructType(List(
      StructField("rn", IntegerType, nullable = false),
      StructField("pass_id", StringType, nullable = false),
      StructField("feature", StringType, nullable = false)
    ))
    val oriDF = spark.sqlContext.createDataFrame(rowRdd, schema)

    import spark.implicits._
    // Decode every feature once and broadcast the full list so each executor
    // can run the 1:N comparison locally without reshuffling data.
    val featureList = oriDF.select("feature")
      .map(row => {
        val str = row.get(0).asInstanceOf[String]
        Base64.decode(str)
      }).collectAsList()

    val featureListBC = spark.sparkContext.broadcast(featureList)
    // rnList is index-aligned with featureList: position i of the comparison
    // result maps back to rnList(i).
    val rnList = oriDF.select("rn").map(row => row.get(0).asInstanceOf[Int]).collect()
    val rnListBC = spark.sparkContext.broadcast(rnList)
    val passidList = oriDF.select("pass_id").map(row => row.get(0).asInstanceOf[String]).collect()
    // rn -> pass_id lookup used inside the partition loop.
    val rpMaps = rnList.zip(passidList).toMap
    val rpMapsBC = spark.sparkContext.broadcast(rpMaps)

    val rf = oriDF.map(row => (row.getAs[Int]("rn"), row.getAs[String]("feature")))

    println("初始时DF的分区数：" + rf.rdd.partitions.size)
    val rfRdd = rf.rdd.coalesce(args(2).toInt)
    println("重置后RDD的分区数：" + rfRdd.partitions.size)

    // Parse the threshold once instead of on every scored element.
    val threshold = args(3).toDouble

    rfRdd.foreachPartition(iterator => {
      // One Redis connection per partition; closed in the finally block so the
      // connection (pool slot) is not leaked when a task fails mid-partition.
      val jedis = JedisUtil.getJedis
      try {
        val tag = args(0) + "_tag"
        val pid_vid = args(0) + "_pid_vid"
        while (iterator.hasNext) {
          val next = iterator.next()
          // Skip records another task already assigned to a group (bitmap flag).
          if (!jedis.getbit(tag, next._1)) {
            var list = List[Int](next._1)
            val var1 = Base64.decode(next._2)
            val st: Long = System.currentTimeMillis
            val doubleArr = new FeatureCompare().featureCompByByte(var1, featureListBC.value)
            val et: Long = System.currentTimeMillis
            println(s"对比一次1：N耗时：  ${et - st}")
            // Re-check after the slow comparison: another task may have claimed
            // this rn meanwhile. Best-effort only — not a real distributed lock.
            if (!jedis.getbit(tag, next._1)) {
              val vid = rpMapsBC.value.getOrElse(next._1, "")
              for (i <- doubleArr.indices) {
                if (doubleArr(i) > threshold) {
                  val rn = rnListBC.value(i)
                  if (!list.contains(rn)) {
                    list = list :+ rn
                  }
                }
              }
              val bl: Long = System.currentTimeMillis
              println(s"遍历结果耗时：  ${bl - et}")
              // Persist the group: mark each member claimed, record the member
              // set under this rn, and map pass_id -> group representative id.
              list.foreach(x => {
                jedis.setbit(tag, x, true)
                jedis.sadd(String.valueOf(next._1), String.valueOf(x))
                val pid = rpMapsBC.value.getOrElse(x, "")
                jedis.hsetnx(pid_vid, pid, vid)
              })
              val cc: Long = System.currentTimeMillis
              println(s"存储结果耗时：  ${cc - bl}")
            }
          }
        }
      } finally {
        // Jedis implements Closeable; close() returns the connection to the pool.
        jedis.close()
      }
    })

    val endTime: Long = System.currentTimeMillis
    println(s"[${LocalDateTime.now()}]  ClassifyPerson作业结束.......................  now_ms=${endTime}")
  }

  /**
    * Mock of the native (SO) similarity comparison: produces one pseudo-random
    * similarity score in [0, 1) per candidate feature.
    *
    * @param var1 probe feature bytes (ignored by the mock)
    * @param var2 candidate feature list; one score is produced per element
    * @return an array of `var2.size()` random doubles
    */
  def featureCompByByte(var1: Array[Byte], var2: util.List[Array[Byte]]): Array[Double] =
    Array.fill(var2.size())(Math.random())
}

