package com.whoami.idmagic.apprank

import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.storage.StorageLevel

object Sample1 {

  /** Number of pruning iterations to run (unchanged from the original `0 to 9`). */
  private val NumIterations = 10

  /** Fraction of rows retained after each pruning iteration. */
  private val KeepFraction = 0.8

  /**
   * Iteratively prunes a sampled population so its per-app install rates
   * converge toward `targetPercentage`. Each iteration scores every row by
   * its squared deviation from the target distribution, keeps the
   * highest-scoring `KeepFraction` of rows, and repeats.
   *
   * Rows are assumed to be (id: String, flags: Seq[Int]) where flags(i) is a
   * 0/1 indicator per app — TODO confirm against the upstream writer.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("apprank")
      .getOrCreate()
    import spark.implicits._

    // Target per-app percentages the retained population should approach.
    val targetPercentage: List[Double] = List(0.915, 0.529, 0.516, 0.540, 0.002, 0.313, 0.210, 0.101, 0.072, 0.445,
      0.507, 0.409, 0.304, 0.060, 0.358, 0.246, 0.043, 0.026, 0.282, 0.370, 0.317, 0.052, 0.135, 0.113, 0.058, 0.052,
      0.246, 0.130, 0.066, 0.023, 0.025, 0.088, 0.027, 0.008, 0.131, 0.133)

    // Persist the source once: every iteration runs two actions against the
    // current DataFrame, so without a cache the lineage would be re-scanned.
    var d = spark.read.orc("/user/mz_spt/test/tzp/com.whoami.idmagic.xunji/apprank/sample01")
    d.persist(StorageLevel.MEMORY_AND_DISK_SER)
    // Materialize the cache; `cnt` is the row count of the *current* d,
    // recomputed at the end of each iteration (Float kept for parity with the
    // original arithmetic).
    var cnt = d.count.toFloat

    for (_ <- 0 until NumIterations) {
      // Current fraction of rows carrying each app: element-wise sum of the
      // 0/1 flag vectors divided by the row count. Note: Dataset.reduce
      // throws on an empty dataset, so the input must be non-empty.
      val cur = d.reduce((v1, v2) => {
        val summed = v1.getSeq[Int](1)
          .zip(v2.getSeq[Int](1))
          .map { case (a, b) => a + b }
        Row("", summed)
      }).getSeq[Int](1).map(_ / cnt)
      println(cur)

      // Signed deviation of the current distribution from the target.
      val diff = cur.zip(targetPercentage).map { case (c, t) => c - t }

      // Score each row by its squared distance from the target deviation,
      // summed over the LAST 9 apps only (takeRight(9) — presumably the apps
      // being optimized; confirm with the data owner). A 0-flag is treated
      // as -1 so absence pushes the score in the opposite direction.
      val parent = d
      d = d.map(f => {
        val dist = f.getSeq[Int](1).zip(diff).map { case (flag, dev) =>
          if (flag == 0) math.pow(-1 - dev, 2)
          else math.pow(flag - dev, 2)
        }.takeRight(9).sum
        (f.getString(0), f.getSeq[Int](1), dist)
      }).sort($"_3".desc).limit((cnt * KeepFraction).toInt).toDF()
      d.persist(StorageLevel.MEMORY_AND_DISK_SER)

      // Materialize the new cache first (it reads through the parent's cache),
      // THEN release the parent — otherwise ten persisted generations pile up
      // on the executors over the loop.
      cnt = d.count.toFloat
      parent.unpersist()
    }

    d.write.orc("/user/mz_spt/test/tzp/com.whoami.idmagic.xunji/apprank/0902_02")
    d.unpersist()
  }

}
