package com.zhang.sparksql_2

import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.{Encoder, Encoders, SparkSession, functions}

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

/**
 * @title: Spark SQL demo — top-3 products per area, with a city click-share remark UDAF
 * @author: zhang
 * @date: 2022/2/19 09:36
 */
object SparkSQL05_req {

  /**
   * Entry point: for every area, ranks products by click count and keeps the
   * top 3, attaching a per-product "city remark" string produced by the
   * registered `cityRemarkUDAF` (share of clicks from the leading cities).
   *
   * NOTE(review): relies on Hive tables `user_visit_action`, `product_info`
   * and `city_info` being available via the Hive metastore — confirm the
   * environment before running locally.
   */
  def main(args: Array[String]): Unit = {
    // Impersonate user "zhang" for HDFS/Hive access.
    System.setProperty("HADOOP_USER_NAME", "zhang")

    // TODO: build the execution environment (SparkSession with Hive support)
    val spark: SparkSession = SparkSession
      .builder()
      .enableHiveSupport()
      .master("local[*]")
      .appName("DataFrame")
      .getOrCreate()

    // Register the typed Aggregator as a SQL-callable UDAF.
    spark.udf.register("cityRemarkUDAF", functions.udaf(new CityRemarkUDAF()))
    // Query plan: t1 joins actions with product/city dims (clicks only),
    // t2 aggregates clicks per (area, product) and builds the city remark
    // (NOTE(review): the UDAF column has no alias, so it gets a generated
    // name), t3 ranks products per area, outer query keeps rank <= 3.
    spark.sql(
      """
        |select
        |*
        |from(
        |    select
        |        *,
        |        rank() over( partition by area order by clickCnt desc) rk
        |    from(
        |          select
        |            area,
        |            product_name,
        |            cityRemarkUDAF(city_name),
        |            count(*) clickCnt
        |          from(
        |            select
        |              *,
        |              p.product_name,
        |              c.city_name,
        |              c.area
        |            from user_visit_action a
        |            join product_info p on a.click_product_id = p.product_id
        |            join city_info c on a.city_id = c.city_id
        |            where a.click_product_id != -1
        |          )t1 group by area,product_name
        |    )t2
        |)t3 where rk<=3
        |""".stripMargin).show(false)

    // TODO: shut down the environment
    spark.stop()
  }

  /**
   * Mutable aggregation buffer for [[CityRemarkUDAF]].
   *
   * @param total   total number of click rows folded into this buffer
   * @param cityMap click count per city name
   */
  case class RemarkBuffer(var total: Long, var cityMap: mutable.Map[String, Long])

  /**
   * Typed UDAF: aggregates city names of click rows into a remark string of
   * the form "cityA p1 %,cityB p2 %,其他rest %" — the two cities with the
   * most clicks and their (integer-truncated) percentage of the group total,
   * plus a remainder bucket when more than two cities contributed.
   *
   * Input: one city name per click row. Buffer: [[RemarkBuffer]].
   * Output: the formatted remark string.
   */
  class CityRemarkUDAF extends Aggregator[String, RemarkBuffer, String] {
    /** Neutral buffer: zero clicks, empty per-city map. */
    override def zero: RemarkBuffer = RemarkBuffer(0L, mutable.Map[String, Long]())

    /** Fold a single click's city into the buffer (mutates in place). */
    override def reduce(buffer: RemarkBuffer, city: String): RemarkBuffer = {
      buffer.total += 1
      // Fix: the original re-assigned buffer.cityMap to itself (a no-op);
      // updating the map in place is sufficient.
      buffer.cityMap.update(city, buffer.cityMap.getOrElse(city, 0L) + 1)
      buffer
    }

    /** Merge two partial buffers; b1 absorbs b2 and is returned. */
    override def merge(b1: RemarkBuffer, b2: RemarkBuffer): RemarkBuffer = {
      b1.total += b2.total
      b2.cityMap.foreach { case (city, cnt) =>
        b1.cityMap.update(city, b1.cityMap.getOrElse(city, 0L) + cnt)
      }
      b1
    }

    /**
     * Render the final remark: top-2 cities with truncated integer
     * percentages, and a "其他" (other) remainder when >2 cities exist.
     */
    override def finish(buffer: RemarkBuffer): String = {
      val total: Long = buffer.total
      if (total == 0L) {
        // Defensive: an empty group would otherwise divide by zero.
        ""
      } else {
        val ranked: List[(String, Long)] =
          buffer.cityMap.toList.sortBy(_._2)(Ordering.Long.reverse)
        val top: List[(String, Long)] = ranked.take(2)
        val parts: ListBuffer[String] = ListBuffer()
        // Remainder starts at 100 and shrinks by each top city's share;
        // truncation means "其他" absorbs the rounding slack.
        var rest = 100L
        top.foreach { case (city, cnt) =>
          val pct: Long = cnt * 100 / total // integer (truncated) percent
          rest -= pct
          parts.append(s"$city $pct %")
        }
        if (ranked.size > 2) {
          parts.append(s"其他$rest %")
        }
        parts.mkString(",")
      }
    }

    /** Buffer is a case class, so the generic product encoder applies. */
    override def bufferEncoder: Encoder[RemarkBuffer] = Encoders.product

    override def outputEncoder: Encoder[String] = Encoders.STRING
  }
}
