package com.atguigu.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object CategoryTop10 {

  /**
   * Computes the Top-10 product categories from a user-visit-action log,
   * ranked descending by (clickCount, orderCount, payCount) — `sortBy` on the
   * Tuple3 uses its implicit lexicographic Ordering, so clicks dominate and
   * orders/pays break ties.
   *
   * Log format (fields separated by `_`):
   *   - field 6:  clicked category id, or "-1" when the action is not a click
   *   - field 8:  comma-separated ordered category ids, or "null"
   *   - field 10: comma-separated paid category ids, or "null"
   *
   * @param args optional: args(0) overrides the default input path
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("CategoryTop10") // was "WordCount" — copy-paste leftover from another job
      .set("spark.testing.memory", "2147480000")
    val sc: SparkContext = new SparkContext(conf)
    try {
      // Input path is overridable from the command line; default preserves old behavior.
      val path: String =
        if (args.nonEmpty) args(0) else "spark-core/data/user_visit_action.txt"
      val log: RDD[String] = sc.textFile(path)

      // Expand each action line into per-category counters (clicks, orders, pays).
      // A line represents exactly one action kind, hence the if/else-if chain.
      val category: RDD[(String, (Int, Int, Int))] = log.flatMap { line =>
        val fields: Array[String] = line.split("_")
        if (fields(6) != "-1") {
          // Click action: field 6 holds a single category id.
          List((fields(6), (1, 0, 0)))
        } else if (fields(8) != "null") {
          // Order action: field 8 holds a comma-separated category id list.
          fields(8).split(",").map((_, (0, 1, 0)))
        } else if (fields(10) != "null") {
          // Payment action: field 10 holds a comma-separated category id list.
          fields(10).split(",").map((_, (0, 0, 1)))
        } else {
          Nil
        }
      }

      category
        .reduceByKey { case ((c1, o1, p1), (c2, o2, p2)) =>
          (c1 + c2, o1 + o2, p1 + p2)
        }
        .sortBy(_._2, ascending = false) // Tuple3 ordering: clicks, then orders, then pays
        .take(10)
        .foreach(println)
    } finally {
      // Always release the SparkContext, even when the job throws.
      sc.stop()
    }
  }
}
