package day04.demo

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Top-N hot product categories, ranked by click count, then order count,
 * then payment count; prints the top 10 to stdout.
 *
 * Input: underscore-delimited user-action log lines where
 *   field 6  = clicked category id ("-1" when the action is not a click),
 *   field 8  = comma-separated ordered category ids ("null" when not an order),
 *   field 10 = comma-separated paid category ids ("null" when not a payment).
 */
object TopN_Hot {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("rdd").setMaster("local[*]")
    val sc = new SparkContext(conf)

    val rdd: RDD[String] = sc.textFile("sparkcore/input/user_visit_action.txt")

    // Cache up front: the raw RDD is scanned three times (clicks, orders, payments).
    rdd.cache()

    // (categoryId, clickCount) — split each line once instead of once in
    // `filter` and again in `map`.
    val clickRdd: RDD[(String, Int)] = rdd
      .map(_.split("_"))
      .filter(fields => fields(6) != "-1")
      .map(fields => (fields(6), 1))
      .reduceByKey(_ + _)

    // (categoryId, orderCount): field 8 is a comma-separated category list.
    val orderRdd: RDD[(String, Int)] = rdd
      .map(_.split("_"))
      .filter(fields => fields(8) != "null")
      .flatMap(fields => fields(8).split(",").map((_, 1)))
      .reduceByKey(_ + _)

    // (categoryId, payCount): field 10 is a comma-separated category list.
    val payRdd: RDD[(String, Int)] = rdd
      .map(_.split("_"))
      .filter(fields => fields(10) != "null")
      .flatMap(fields => fields(10).split(",").map((_, 1)))
      .reduceByKey(_ + _)

    clickRdd.cogroup(orderRdd, payRdd)
      .mapValues {
        // After reduceByKey each iterable holds at most one count per key,
        // but a category may be absent from one metric entirely (e.g. ordered
        // yet never clicked) — then that iterable is EMPTY and the original
        // `.toList.max` would throw UnsupportedOperationException("empty.max").
        // `.sum` yields 0 for an empty iterable and equals the single value
        // otherwise, so it is both safe and equivalent here.
        case (clickIter, orderIter, payIter) =>
          (clickIter.sum, orderIter.sum, payIter.sum)
      }
      // Tuple3 ordering ranks by clicks first, then orders, then payments.
      .sortBy(_._2, ascending = false).take(10)
      .foreach(println)

    sc.stop()
  }
}
