package com.zhang.spark_1.spark_core.req

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @title: Hot-category Top-10 statistics (click / order / payment counts per category)
 * @author: zhang
 * @date: 2021/12/8 23:05
 */
object Spark02_top10 {

  /**
   * Computes the Top-10 hot categories of `datas/user_visit_action.txt`,
   * ranked by (clickCount, orderCount, payCount) descending.
   *
   * Record format (underscore-separated fields, per the original filters):
   *   - field 6  : clicked category id  ("-1" when the action is not a click)
   *   - field 8  : ordered category ids ("null" when not an order; comma-separated list)
   *   - field 10 : paid category ids    ("null" when not a payment; comma-separated list)
   *
   * Improvement over the original: the source RDD was filtered + re-split six
   * times (filter/map per action type) and three shuffled RDDs were unioned,
   * which also forced a `cache()`. Here one `flatMap` pass emits
   * (categoryId, (click, order, pay)) fragments and a single `reduceByKey`
   * sums them, so the file is read and parsed exactly once. The three
   * conditions are evaluated independently (not else-if), so a line counted
   * in several buckets by the original is still counted the same way.
   */
  def main(args: Array[String]): Unit = {
    // Spark connection (local mode, all cores).
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("top10")
    val sc: SparkContext = new SparkContext(conf)

    val rdd: RDD[String] = sc.textFile("datas/user_visit_action.txt")

    // (categoryId, (clickCount, orderCount, payCount)), aggregated in one shuffle.
    val analysisRDD: RDD[(String, (Int, Int, Int))] = rdd.flatMap { line =>
      val datas: Array[String] = line.split("_")
      val clicks: List[(String, (Int, Int, Int))] =
        if (datas(6) != "-1") List((datas(6), (1, 0, 0))) else Nil
      val orders: List[(String, (Int, Int, Int))] =
        if (datas(8) != "null") datas(8).split(",").toList.map(cid => (cid, (0, 1, 0))) else Nil
      val pays: List[(String, (Int, Int, Int))] =
        if (datas(10) != "null") datas(10).split(",").toList.map(cid => (cid, (0, 0, 1))) else Nil
      clicks ++ orders ++ pays
    }.reduceByKey { case ((c1, o1, p1), (c2, o2, p2)) =>
      (c1 + c2, o1 + o2, p1 + p2)
    }

    // Tuple3 ordering is lexicographic: clicks first, ties broken by orders, then payments.
    analysisRDD.sortBy(_._2, ascending = false).take(10).foreach(println)

    sc.stop()
  }

}
