package com.sisyphus

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}

/**
 * Per-category active-session statistics: for each of the Top-10 popular
 * product categories, count clicks per (category, user) and keep the 10
 * most active sessions/users in each category.
 *
 * @author sweetdream
 * @since 2022/8/14
 */
object Top10Session {

  /** Default input path, used when no path is supplied on the command line. */
  private val DefaultInputPath = "D:\\BaiduNetdiskDownload\\user_visit_action.txt"

  /** IDs of the pre-computed Top-10 popular categories (from the Top10Category job). */
  private val Top10CategoryIds: Seq[Long] = Seq(7L, 20L, 17L, 4L, 15L, 11L, 2L, 12L, 9L, 16L)

  /**
   * Entry point.
   *
   * @param args optional: args(0) overrides the input file path
   *             (defaults to [[DefaultInputPath]] for backward compatibility).
   */
  def main(args: Array[String]): Unit = {
    // 1. environment
    val sparkSession = SparkSession.builder()
      .appName("Top10Session")
      .master("local[2]")
      .getOrCreate()

    // Ensure the session is always stopped, even if the job fails.
    try {
      val inputPath = args.headOption.getOrElse(DefaultInputPath)

      // 2. source
      // Explicit schema for the '_'-separated user_visit_action log file.
      val schema = StructType(
        Array(
          // date of the user's click action
          StructField("date", StringType, true),
          // user ID
          StructField("user_id", LongType, true),
          // session ID
          StructField("session_id", StringType, true),
          // page ID
          StructField("page_id", LongType, true),
          // timestamp of the action
          StructField("action_time", StringType, true),
          // keyword the user searched for
          StructField("search_keyword", StringType, true),
          // clicked product-category ID
          StructField("click_category_id", LongType, true),
          // clicked product ID
          StructField("click_product_id", LongType, true),
          // all category IDs in one order
          StructField("order_category_ids", StringType, true),
          // all product IDs in one order
          StructField("order_product_ids", StringType, true),
          // all category IDs in one payment
          StructField("pay_category_ids", StringType, true),
          // all product IDs in one payment
          StructField("pay_product_ids", StringType, true),
          // city ID
          StructField("city_id", StringType, true)
        )
      )

      val data = sparkSession.read
        .option("sep", "_")
        .schema(schema)
        .csv(inputPath)

      data.createOrReplaceTempView("table")

      // 3. transformation
      // For each popular category, rank users by click count and keep the top 10.
      val categoryIdList = Top10CategoryIds.mkString(",")
      val sql =
        s"""
          |select click_category_id, user_id, c
          |from (
          |       select click_category_id, user_id, c,
          |              row_number() over(partition by click_category_id order by c desc) as row
          |       from (
          |              select click_category_id, user_id, count(user_id) as c
          |              from table
          |              where click_category_id in ($categoryIdList)
          |              group by click_category_id,user_id
          |            )
          |      )
          |where row <=10
          |order by click_category_id
          |""".stripMargin

      val res = sparkSession.sql(sql)

      // 4. sink
      res.show(100)
    } finally {
      // 5. cleanup — release the Spark context even on failure
      sparkSession.stop()
    }
  }
}
