package com.sisyphus

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}

/**
 * @author: sweetdream
 * @description:
 * Top10Category — top popular product categories.
 * Ranks the Top-10 categories by their click, order, and payment counts.
 * Composite score = clicks * 20% + orders * 30% + payments * 50%
 * @date: 2022/8/14
 */
object Top10Category {

  /**
   * Computes the Top-10 product categories from a user-visit-action log,
   * ranked by the weighted score: clicks * 0.2 + orders * 0.3 + payments * 0.5.
   *
   * @param args optional; args(0) overrides the input file path
   *             (defaults to the original hard-coded location)
   */
  def main(args: Array[String]): Unit = {
    // 1. Environment: local SparkSession with 2 worker threads.
    val sparkSession = SparkSession.builder()
      .appName("Top10Category")
      .master("local[2]")
      .getOrCreate()

    // 2. Source.
    // Explicit schema for the "_"-separated action log (one action per line).
    val schema = StructType(
      Array(
        // Date of the user action
        StructField("date", StringType, true),
        // User ID
        StructField("user_id", LongType, true),
        // Session ID
        StructField("session_id", StringType, true),
        // Page ID
        StructField("page_id", LongType, true),
        // Timestamp of the action
        StructField("action_time", StringType, true),
        // Search keyword entered by the user
        StructField("search_keyword", StringType, true),
        // Clicked category ID (-1 when the action is not a click)
        StructField("click_category_id", LongType, true),
        // Clicked product ID
        StructField("click_product_id", LongType, true),
        // Comma-separated category IDs of one order
        StructField("order_category_ids", StringType, true),
        // Comma-separated product IDs of one order
        StructField("order_product_ids", StringType, true),
        // Comma-separated category IDs of one payment
        StructField("pay_category_ids", StringType, true),
        // Comma-separated product IDs of one payment
        StructField("pay_product_ids", StringType, true),
        // City ID
        StructField("city_id", StringType, true)
      )
    )

    // Input path is overridable from the command line; the default keeps the
    // original behavior for existing callers.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "D:\\BaiduNetdiskDownload\\user_visit_action.txt"

    val data = sparkSession.read
      .option("sep", "_")
      .schema(schema)
      .csv(inputPath)

    data.createOrReplaceTempView("table")

    // 3. Transformation.
    // Composite score = clicks * 0.2 + orders * 0.3 + payments * 0.5, computed
    // as three weighted partial aggregations that are unioned and summed below.

    // Click count per category; -1 marks rows that are not click actions.
    val clickSql = "select click_category_id as category_id, count(click_category_id) * 0.2 as num from table " +
      "where click_category_id != -1 " +
      "group by click_category_id"
    val clickRes = sparkSession.sql(clickSql)

    // Order count per category. The explicit table alias `tmp` is required by
    // the LATERAL VIEW grammar; omitting it only parses by accident because
    // Spark accepts `as` itself as a (non-reserved) alias identifier.
    // NOTE(review): the 'null' comparison assumes empty fields are the literal
    // string "null" in the source file — confirm against the data.
    val orderSql = "select category_id,count(category_id) * 0.3 as num from table " +
      "lateral view explode(split(order_category_ids,',')) tmp as category_id " +
      "where order_category_ids != 'null' " +
      "group by category_id"
    val orderRes = sparkSession.sql(orderSql)

    // Payment count per category; same explode-and-count pattern as orders.
    val paySql = "select category_id,count(category_id) * 0.5 as num from table " +
      "lateral view explode(split(pay_category_ids,',')) tmp as category_id " +
      "where pay_category_ids != 'null' " +
      "group by category_id"
    val payRes = sparkSession.sql(paySql)

    // Sum the three weighted partial scores per category, then rank descending.
    // Immutable val chain instead of a reassigned var.
    val scored = clickRes.union(orderRes).union(payRes)
      .groupBy("category_id")
      .sum("num")
    val ranked = scored.sort(scored.col("sum(num)").desc)

    // 4. Sink: print the ten highest-scoring categories.
    ranked.show(10)

    // 5. Tear down the session.
    sparkSession.stop()
  }
}
