import org.apache.spark.SparkConf
import org.apache.spark.sql._
import org.apache.spark.sql.expressions.Aggregator

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

object Reqq03 {

  /**
   * Spark job: for every area, find the top-3 most-clicked products and
   * annotate each with a per-city click-percentage breakdown ("remark").
   *
   * Pipeline (each stage is a temp view consumed by the next):
   *   t1 — click actions joined with city and product dimension tables
   *   t2 — click counts per (area, product) plus the city-remark UDAF result
   *   t3 — rank of each product within its area by click count
   *   final — rows with rank <= 3, printed untruncated
   */
  def main(args: Array[String]): Unit = {

    // Run as the "root" Hadoop user.
    // Fixed typo: the property was previously "HADOOP_USER_NAEM", which
    // silently left HADOOP_USER_NAME unset.
    System.setProperty("HADOOP_USER_NAME", "root")
    val conf = new SparkConf().setMaster("local[*]").setAppName("sparkSQL")
    val spark = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()

    spark.sql("use chenL")

    // Base data: click actions joined with the city / product dimensions.
    // click_product_id > 0 filters out non-click (search/order/pay) actions.
    spark.sql(
      """
        |select
        |    u.*,
        |    p.product_name,
        |    c.area,c.city_name
        |from
        |    user_visit_action u
        |    join city_info c on u.city_id = c.city_id
        |    join product_info p on u.click_product_id = p.product_id
        |where
        |    u.click_product_id > 0
        |""".stripMargin).createOrReplaceTempView("t1")

    // Aggregate by (area, product); cityRemark builds the per-city breakdown.
    spark.udf.register("cityRemark", functions.udaf(new CityRemarkUDAF()))
    spark.sql(
      """
        |select
        |    area,
        |    product_name,
        |    count(*) as clickCnt,
        |    cityRemark(city_name) as city_remark
        |from
        |    t1
        |group by area, product_name
        |""".stripMargin).createOrReplaceTempView("t2")

    // Rank products by click count within each area.
    spark.sql(
      """
        |select
        |    *,
        |    rank() over( partition by area order by clickCnt desc ) as rank
        |from t2
          """.stripMargin).createOrReplaceTempView("t3")

    // Keep only the top 3 per area; show(false) prints columns untruncated.
    spark.sql(
      """
        |select *
        |from t3
        |where rank <= 3
        |""".stripMargin).show(false)

    spark.close()
  }

  /** Aggregation buffer: total click count plus per-city click counts. */
  case class Buffer(var total: Long, var cityMap: mutable.Map[String, Long])

  /**
   * UDAF over city names (one input row per click) producing a remark string
   * such as "CityA 60%, CityB 25%, 其它 15%": the two most-clicked cities
   * with their integer percentage share, plus an "other" bucket holding the
   * remainder whenever more than two cities appear in the group.
   */
  class CityRemarkUDAF() extends Aggregator[String, Buffer, String] {

    /** Fresh, empty buffer (zero clicks, no cities seen). */
    override def zero: Buffer = Buffer(0L, mutable.Map.empty[String, Long])

    /** Fold one city name into the buffer: bump the total and that city's count. */
    override def reduce(b: Buffer, a: String): Buffer = {
      b.total += 1
      b.cityMap.update(a, b.cityMap.getOrElse(a, 0L) + 1)
      b
    }

    /** Merge two partial buffers by accumulating b2's counts into b1. */
    override def merge(b1: Buffer, b2: Buffer): Buffer = {
      b1.total += b2.total
      b2.cityMap.foreach { case (city, cnt) =>
        b1.cityMap.update(city, b1.cityMap.getOrElse(city, 0L) + cnt)
      }
      b1
    }

    /** Render the buffer as "city pct%, city pct%[, 其它 pct%]". */
    override def finish(reduction: Buffer): String = {
      val totalCnt = reduction.total
      if (totalCnt == 0L) {
        // Guard: an empty group would otherwise divide by zero below.
        ""
      } else {
        val cityMap = reduction.cityMap
        // Top two cities by click count, descending.
        val topCities = cityMap.toList.sortBy(-_._2).take(2)

        val remarks = ListBuffer[String]()
        var shownPct = 0L
        topCities.foreach { case (city, cnt) =>
          val pct = cnt * 100 / totalCnt // truncated integer percentage
          remarks.append(s"$city $pct%")
          shownPct += pct
        }
        // Lump every remaining city into an "other" bucket so the shares sum to 100.
        if (cityMap.size > 2) {
          remarks.append(s"其它 ${100 - shownPct}%")
        }
        remarks.mkString(", ")
      }
    }

    /** Buffer is a case class, so the generic product encoder applies. */
    override def bufferEncoder: Encoder[Buffer] = Encoders.product

    override def outputEncoder: Encoder[String] = Encoders.STRING
  }

}
