package com.yanggu.spark.core.demand.categoryTop10

import org.apache.spark.{SparkConf, SparkContext}

//统计商品热门分类的top10
//先根据点击次数、下单次数、支付次数排序
//这里使用reduceByKey算子
/**
 * Computes the Top-10 hot product categories from the user action log.
 *
 * Ranking criterion: click count first, then order count, then payment count,
 * all descending. Implemented with a single `flatMap` + `reduceByKey` pass.
 *
 * Input: `input/user_visit_action.txt`, one action per line, 13 fields joined
 * by `_`. Field layout (0-based) as relied upon by the patterns below:
 *   5  = search keyword ("null" for non-search actions)
 *   6  = click category id ("-1" when the action is not a click)
 *   7  = click product id
 *   8  = order category ids, comma separated ("null" when not an order)
 *   10 = pay category ids, comma separated ("null" when not a payment)
 */
object CategoryCountTop10_1 {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("CategoryCountTop101").setMaster("local[*]")
    val sparkContext = new SparkContext(sparkConf)
    try {
      // Requirement 1: Top-10 hot categories.
      // Shape after flatMap: (categoryId, CategoryCount(click, order, pay))
      sparkContext.textFile("input/user_visit_action.txt")
        // Flatten each line into zero or more (categoryId, count) pairs.
        .flatMap { line =>
          // Pattern-match on the 13 underscore-separated fields to classify the action.
          line.split("_") match {
            // Click action: search keyword is "null", click category id present,
            // order/pay fields all "null" — count one click for that category.
            case Array(_, _, _, _, _, "null", click_category_id, _, "null", "null", "null", "null", _) =>
              Array((click_category_id, CategoryCount(1, 0, 0)))
            // Order action: one order counted per category id in the comma list.
            case Array(_, _, _, _, _, "null", "-1", "-1", order_category_ids, _, "null", "null", _) =>
              order_category_ids.split(",").map(id => (id, CategoryCount(0, 1, 0)))
            // Payment action: one payment counted per category id in the comma list.
            case Array(_, _, _, _, _, "null", "-1", "-1", "null", "null", pay_category_ids, _, _) =>
              pay_category_ids.split(",").map(id => (id, CategoryCount(0, 0, 1)))
            // Anything else (e.g. search actions or malformed lines) contributes nothing.
            case _ => Nil
          }
        }
        // Sum the per-action counters for each category id.
        .reduceByKey((a, b) =>
          CategoryCount(a.clickCount + b.clickCount, a.orderCount + b.orderCount, a.payCount + b.payCount))
        // Sort by (click, order, pay) descending — tuple ordering is lexicographic.
        .sortBy(t => (t._2.clickCount, t._2.orderCount, t._2.payCount), ascending = false)
        // Keep only the ten hottest categories and print them on the driver.
        .take(10)
        .foreach(println)
    } finally {
      // Always release the SparkContext, even when the job above fails;
      // the original leaked it on any exception (and on normal exit).
      sparkContext.stop()
    }
  }

}

/**
 * Immutable accumulator of per-category action counts, combined during the
 * `reduceByKey` aggregation.
 *
 * The original declared the fields as `var`, but nothing in the job ever
 * mutates them — a mutable case class used as a shuffle value is an
 * anti-pattern, so the fields are now plain immutable constructor params.
 *
 * @param clickCount total number of click actions for the category
 * @param orderCount total number of order actions for the category
 * @param payCount   total number of payment actions for the category
 */
final case class CategoryCount(
                                clickCount: Long,
                                orderCount: Long,
                                payCount: Long
                              )
