package com.king.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql._
import org.apache.spark.sql.expressions.Aggregator

import scala.collection.mutable
import scala.collection.mutable.ListBuffer


object SparkSQL10_Req_4{
  def main(args: Array[String]): Unit = {

    // Identity used for HDFS access when talking to the Hive warehouse.
    System.setProperty("HADOOP_USER_NAME", "atguigu")

    val sparkConf: SparkConf = new SparkConf().setAppName("SparkSQL").setMaster("local[*]")

    // SparkSession is the unified entry point for Spark SQL.
    val spark: SparkSession =
      SparkSession.builder()
        .config(sparkConf)
        .enableHiveSupport() // required: the query below reads Hive tables
        .getOrCreate()

    // Register the strongly-typed aggregator so it can be called from SQL as cityMark(...).
    spark.udf.register("cityMark", functions.udaf(new CityMarkAgg))

    // Requirement: top-3 most-clicked products per area, with a per-city
    // click-ratio remark (computed by the cityMark UDAF).
    //   t1: join click log with product and city dimension tables
    //   t2: count clicks per (area, product) and build the city remark
    //   t3: rank products within each area by click count
    spark.sql(
      """
        |select
        |  t3.area,
        |  t3.product_name,
        |  t3.p_click_count,
        |  t3.c_m,
        |  t3.rk
        |from
        |  (
        |select
        |  t2.area,
        |  t2.product_name,
        |  t2.p_click_count,
        |  t2.c_m ,
        |  rank() over( partition by t2.area order by t2.p_click_count desc ) rk
        |from
        |  (
        |select
        |  t1.area,
        |  t1.product_name,
        |  count(t1.click_product_id) p_click_count,
        |  cityMark(t1.city_name) c_m
        |
        |from
        |  (
        |select
        |    u.click_product_id ,
        |    p.product_name,
        |    c.city_name,
        |    c.area
        |from
        |   user_visit_action  u
        |join
        |   product_info p
        |on
        |   u.click_product_id = p.product_id
        |join
        |   city_info c
        |on
        |   u.city_id  = c.city_id
        |where
        |   u.click_product_id != -1
        |  )t1
        |group by t1.area , t1.product_name
        |  )t2
        |  )t3
        |where t3.rk <=3
      """.stripMargin).show(50, false) // truncate = false: keep the full remark string visible

    spark.stop()

  }

  /**
    * Mutable aggregation buffer for [[CityMarkAgg]].
    *
    * @param totalClick total number of clicks accumulated so far
    * @param cityMap    click count accumulated per city name
    */
  case class CityBuffer(var totalClick: Long, var cityMap: mutable.Map[String, Long])

  /**
    * Custom strongly-typed aggregate function (UDAF).
    *
    * Extends [[Aggregator]] with type parameters:
    *   IN:  String     — the city name of one clicked row
    *   BUF: CityBuffer — running totals (overall and per city)
    *   OUT: String     — remark listing the top-2 cities with their click
    *                     percentage, plus an "other" bucket when more than
    *                     two cities contributed.
    */
  class CityMarkAgg extends Aggregator[String, CityBuffer, String] {

    /** Empty buffer: zero clicks, no cities seen. */
    override def zero: CityBuffer = CityBuffer(0L, mutable.Map[String, Long]())

    /** Fold one input row (a city name) into the buffer. */
    override def reduce(buffer: CityBuffer, cityName: String): CityBuffer = {
      // One more click overall...
      buffer.totalClick += 1

      // ...and one more click for this particular city.
      val old: Long = buffer.cityMap.getOrElse(cityName, 0L)
      buffer.cityMap.put(cityName, old + 1)

      buffer
    }

    /** Combine two partial buffers (from different partitions) into b1. */
    override def merge(b1: CityBuffer, b2: CityBuffer): CityBuffer = {
      b1.totalClick += b2.totalClick

      // Add b2's per-city counts into b1.
      for ((cityName, cityCount) <- b2.cityMap) {
        val old: Long = b1.cityMap.getOrElse(cityName, 0L)
        b1.cityMap.put(cityName, old + cityCount)
      }
      b1
    }

    /** Render the final remark string from the completed buffer. */
    override def finish(buffer: CityBuffer): String = {

      val result: ListBuffer[String] = ListBuffer[String]()

      val totalClick: Long = buffer.totalClick

      // Top-2 cities by click count, descending.
      val citys: List[(String, Long)] = buffer.cityMap.toList.sortBy(_._2)(Ordering.Long.reverse).take(2)

      // Percentage left over for the "other" bucket after the top-2 are taken out.
      var totalPer: Double = 100L

      for ((cityName, clickCount) <- citys) {
        val per: Double = clickCount * 100 / totalClick.toDouble
        totalPer -= per
        val cityMark: String = cityName + " " + per + "%"
        result.append(cityMark)
      }

      // Only emit the "other" entry when cities beyond the top-2 exist.
      if (buffer.cityMap.size > 2) {
        result.append(s"其他 $totalPer%")
      }

      result.mkString(", ")

    }

    // Buffer is a case class, so the product encoder works out of the box.
    override def bufferEncoder: Encoder[CityBuffer] = Encoders.product

    override def outputEncoder: Encoder[String] = Encoders.STRING
  }
}
