package spark.sql.project

import org.apache.spark.sql.expressions.{Aggregator, UserDefinedFunction}
import org.apache.spark.sql.{DataFrame, Encoder, Encoders, SaveMode, SparkSession, functions}

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

/**
 * @Author Jeremy Zheng
 * @Date 2021/4/7 17:34
 * @Version 1.0
 */
object Top3goods {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession =
      SparkSession.builder().appName("top3").master("local").getOrCreate()
    import spark.implicits._

    // Click log: keep (clicked product id, city id) per action row.
    // Fields are tab-separated; column 7 = click_product_id, column 12 = city_id.
    val userInfo: DataFrame = spark.read.textFile("D:\\qq文件\\1203368011\\热门商品Top3练习\\user_visit_action.txt")
      .map(line => {
        val splits: Array[String] = line.split("\t")
        val click_product_id: Long = splits(7).toLong
        val city_id: Long = splits(12).toLong
        (click_product_id, city_id)
      }).toDF("product_id", "city_id")

    // Product dimension table: (product_id, product_name).
    val productInfo: DataFrame = spark.read.textFile("D:\\qq文件\\1203368011\\热门商品Top3练习\\product_info.txt")
      .map(line => {
        val splits: Array[String] = line.split("\t")
        val product_id: Long = splits(0).toLong
        val product_name: String = splits(1)
        (product_id, product_name)
      }).toDF("product_id", "product_name")

    // City dimension table: (city_id, city_name, area).
    val cityInfo: DataFrame = spark.read.textFile("D:\\qq文件\\1203368011\\热门商品Top3练习\\city_info.txt")
      .map(line => {
        val splits: Array[String] = line.split("\t")
        val city_id: Long = splits(0).toLong
        val city_name: String = splits(1)
        val area: String = splits(2)
        (city_id, city_name, area)
      }).toDF("city_id", "city_name", "area")

    userInfo.createOrReplaceTempView("user_info")
    productInfo.createOrReplaceTempView("product_info")
    cityInfo.createOrReplaceTempView("city_info")

    // Join the three tables to get the base rows (area, product_name, city_name).
    // product_id = -1 marks non-click actions and is filtered out.
    spark.sql(
    """
      | select
      | c.area,
      | p.product_name,
      | c.city_name
      | from user_info u
      | join product_info p on u.product_id = p.product_id
      | join city_info c on u.city_id = c.city_id
      | where u.product_id != -1
      |""".stripMargin).createOrReplaceTempView("t1")

    // Per (area, product): total clicks plus a "top two cities + remainder"
    // percentage summary computed by the custom UDAF below.
    spark.udf.register("remark", functions.udaf(new Remark))
    spark.sql(
      """
        | select
        | area,
        | product_name,
        | count(1) clicks,
        | remark(city_name) city_remark
        | from t1
        | group by area,product_name
        |""".stripMargin).createOrReplaceTempView("t2")

    // Rank products by click count within each area.
    spark.sql(
      """
        | select
        | area,
        | product_name,
        | clicks,
        | city_remark,
        | row_number() over(partition by area order by clicks desc) as rank
        | from t2
        |""".stripMargin).createOrReplaceTempView("t3")

    // Keep only the top three products per area (with their city distribution).
    spark.sql(
      """
        |select
        | area,
        | product_name,
        | clicks,
        | city_remark
        |from t3
        |where rank <= 3
        |""".stripMargin).createOrReplaceTempView("t4")

    // Materialize the final result once; it is consumed twice (show + JDBC write),
    // so cache it to avoid re-running the whole join/aggregation pipeline.
    val df: DataFrame = spark.sql("select * from t4").cache()
    df.show()

    // Persist the result to MySQL, appending to table `result1`.
    // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x class;
    // Connector/J 8.x uses com.mysql.cj.jdbc.Driver — confirm the deployed driver.
    df.write
      .format("jdbc")
      .option("url", "jdbc:mysql://hadoop101:3306/sparksql")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "result1")
      .mode(SaveMode.Append)
      .save()

    spark.close()
  }


  // Mutable aggregation buffer for the Remark UDAF: `total` is the overall click
  // count and `cityMap` maps city name -> clicks seen so far. Must stay a case
  // class so it can be serialized via Encoders.product.
  case class Buffer(var total: Long, var cityMap: mutable.Map[String, Long])
  /**
   * UDAF that summarizes a group's city distribution as a percentage string.
   *
   * IN:  one city name per click row
   * BUF: Buffer(total clicks, per-city click counts)
   * OUT: top two cities by clicks plus a remainder bucket,
   *      e.g. "北京21%，成都16%，其他63%"
   */
  class Remark extends Aggregator[String,Buffer,String]{
    /** Fresh buffer: zero clicks, no cities observed yet. */
    override def zero: Buffer = Buffer(0L, mutable.Map.empty[String, Long])

    /** Fold one input city into the buffer: bump the total and that city's count. */
    override def reduce(buffer: Buffer, inCity: String): Buffer = {
      buffer.total += 1
      buffer.cityMap(inCity) = buffer.cityMap.getOrElse(inCity, 0L) + 1
      buffer
    }

    /** Combine two partial buffers by summing totals and per-city counts. */
    override def merge(buff1: Buffer, buff2: Buffer): Buffer = {
      buff1.total += buff2.total
      // Fold buff1's entries into buff2's map, then hand the merged map to buff1.
      val combined: mutable.Map[String, Long] =
        buff1.cityMap.foldLeft(buff2.cityMap) { (acc, entry) =>
          val (city, count) = entry
          acc(city) = acc.getOrElse(city, 0L) + count
          acc
        }
      buff1.cityMap = combined
      buff1
    }

    /** Render the final string: two biggest cities, then an aggregated "其他" bucket. */
    override def finish(buff: Buffer): String = {
      val parts: ListBuffer[String] = ListBuffer[String]()
      // Cities sorted by click count, descending; keep the top two.
      val topTwo: List[(String, Long)] =
        buff.cityMap.toList.sortWith(_._2 > _._2).take(2)
      var covered = 0L
      for ((city, count) <- topTwo) {
        // Integer percentage (truncated); the remainder bucket absorbs the slack.
        val rate = count * 100 / buff.total
        parts += s"${city}${rate}%"
        covered += rate
      }
      // Only emit "其他" when there are cities beyond the top two.
      if (buff.cityMap.size > 2) {
        parts += s"其他${100 - covered}%"
      }
      parts.mkString("，")
    }

    override def bufferEncoder: Encoder[Buffer] = Encoders.product
    override def outputEncoder: Encoder[String] = Encoders.STRING
  }



}
